From f1b483d832e145ab342c1c31ccaef9eeb8a935f5 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Thu, 4 Sep 2025 15:51:17 +0000 Subject: [PATCH 001/254] Enter prerelease mode --- .changeset/pre.json | 10 ++++++++++ 1 file changed, 10 insertions(+) create mode 100644 .changeset/pre.json diff --git a/.changeset/pre.json b/.changeset/pre.json new file mode 100644 index 00000000000..84d911ba5c1 --- /dev/null +++ b/.changeset/pre.json @@ -0,0 +1,10 @@ +{ + "mode": "pre", + "tag": "alpha", + "initialVersions": { + "@apollo/client": "4.0.4", + "@apollo/client-graphql-codegen": "1.0.0", + "@apollo/client-codemod-migrate-3-to-4": "1.0.2" + }, + "changesets": [] +} From 2d6610d3c7967e7cb942db118afc0301c7d8f66c Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Wed, 3 Sep 2025 22:06:30 -0600 Subject: [PATCH 002/254] Add graphql-alpha.9 as dev dependency --- package-lock.json | 12 ++++++++++++ package.json | 1 + 2 files changed, 13 insertions(+) diff --git a/package-lock.json b/package-lock.json index bd60c4e9976..18dbd11a0c4 100644 --- a/package-lock.json +++ b/package-lock.json @@ -82,6 +82,7 @@ "globals": "15.14.0", "graphql": "16.9.0", "graphql-17-alpha2": "npm:graphql@17.0.0-alpha.2", + "graphql-17-alpha9": "npm:graphql@17.0.0-alpha.9", "graphql-ws": "6.0.3", "jest": "29.7.0", "jest-environment-jsdom": "29.7.0", @@ -11498,6 +11499,17 @@ "node": "^14.19.0 || ^16.10.0 || >=18.0.0" } }, + "node_modules/graphql-17-alpha9": { + "name": "graphql", + "version": "17.0.0-alpha.9", + "resolved": "https://registry.npmjs.org/graphql/-/graphql-17.0.0-alpha.9.tgz", + "integrity": "sha512-jVK1BsvX5pUIEpRDlEgeKJr80GAxl3B8ISsFDjXHtl2xAxMXVGTEFF4Q4R8NH0Gw7yMwcHDndkNjoNT5CbwHKA==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^16.19.0 || ^18.14.0 || >=19.7.0" + } + }, "node_modules/graphql-config": { "version": "5.1.5", "resolved": "https://registry.npmjs.org/graphql-config/-/graphql-config-5.1.5.tgz", diff --git a/package.json b/package.json index 71218fb900e..18c08e9fdca 100644 --- a/package.json +++ b/package.json @@ -214,6 +214,7 @@ "globals": "15.14.0", "graphql": "16.9.0", "graphql-17-alpha2": "npm:graphql@17.0.0-alpha.2", + "graphql-17-alpha9": "npm:graphql@17.0.0-alpha.9", "graphql-ws": "6.0.3", "jest": "29.7.0", "jest-environment-jsdom": "29.7.0", From 217905c455defd29812543918b4c9c52e178a21e Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Wed, 3 Sep 2025 22:18:58 -0600 Subject: [PATCH 003/254] [WIP] copy over alpha.9 tests --- .../__tests__/graphql17Alpha9.test.ts | 2581 +++++++++++++++++ 1 file changed, 2581 insertions(+) create mode 100644 src/incremental/handlers/__tests__/graphql17Alpha9.test.ts diff --git a/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts b/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts new file mode 100644 index 00000000000..4764884965e --- /dev/null +++ b/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts @@ -0,0 +1,2581 @@ +import assert from "node:assert"; + +import type { + DocumentNode, + FormattedInitialIncrementalExecutionResult, + FormattedSubsequentIncrementalExecutionResult, +} from "graphql-17-alpha9"; +import { + experimentalExecuteIncrementally, + GraphQLID, + GraphQLList, + GraphQLNonNull, + GraphQLObjectType, + GraphQLSchema, + GraphQLString, +} from "graphql-17-alpha9"; + +import { gql } from "@apollo/client"; + +// This is the test setup of the `graphql-js` v17.0.0-alpha.9 release: +// 
https://github.com/graphql/graphql-js/blob/3283f8adf52e77a47f148ff2f30185c8d11ff0f0/src/execution/__tests__/defer-test.ts + +const friendType = new GraphQLObjectType({ + fields: { + id: { type: GraphQLID }, + name: { type: GraphQLString }, + nonNullName: { type: new GraphQLNonNull(GraphQLString) }, + }, + name: "Friend", +}); + +const friends = [ + { name: "Han", id: 2 }, + { name: "Leia", id: 3 }, + { name: "C-3PO", id: 4 }, +]; + +const deeperObject = new GraphQLObjectType({ + fields: { + foo: { type: GraphQLString }, + bar: { type: GraphQLString }, + baz: { type: GraphQLString }, + bak: { type: GraphQLString }, + }, + name: "DeeperObject", +}); + +const nestedObject = new GraphQLObjectType({ + fields: { + deeperObject: { type: deeperObject }, + name: { type: GraphQLString }, + }, + name: "NestedObject", +}); + +const anotherNestedObject = new GraphQLObjectType({ + fields: { + deeperObject: { type: deeperObject }, + }, + name: "AnotherNestedObject", +}); + +const hero = { + name: "Luke", + id: 1, + friends, + nestedObject, + anotherNestedObject, +}; + +const c = new GraphQLObjectType({ + fields: { + d: { type: GraphQLString }, + nonNullErrorField: { type: new GraphQLNonNull(GraphQLString) }, + }, + name: "c", +}); + +const e = new GraphQLObjectType({ + fields: { + f: { type: GraphQLString }, + }, + name: "e", +}); + +const b = new GraphQLObjectType({ + fields: { + c: { type: c }, + e: { type: e }, + }, + name: "b", +}); + +const a = new GraphQLObjectType({ + fields: { + b: { type: b }, + someField: { type: GraphQLString }, + }, + name: "a", +}); + +const g = new GraphQLObjectType({ + fields: { + h: { type: GraphQLString }, + }, + name: "g", +}); + +const heroType = new GraphQLObjectType({ + fields: { + id: { type: GraphQLID }, + name: { type: GraphQLString }, + nonNullName: { type: new GraphQLNonNull(GraphQLString) }, + friends: { + type: new GraphQLList(friendType), + }, + nestedObject: { type: nestedObject }, + anotherNestedObject: { type: anotherNestedObject }, + }, + name: "Hero", +}); + +const query = new GraphQLObjectType({ + fields: { + hero: { + type: heroType, + }, + a: { type: a }, + g: { type: g }, + }, + name: "Query", +}); + +const schema = new GraphQLSchema({ query }); + +async function* run( + document: DocumentNode, + rootValue: unknown = { hero }, + enableEarlyExecution = false +): AsyncGenerator< + | FormattedInitialIncrementalExecutionResult + | FormattedSubsequentIncrementalExecutionResult +> { + const result = await experimentalExecuteIncrementally({ + schema, + document, + rootValue, + enableEarlyExecution, + }); + + if ("initialResult" in result) { + yield JSON.parse( + JSON.stringify(result.initialResult) + ) as FormattedInitialIncrementalExecutionResult; + + for await (const incremental of result.subsequentResults) { + yield JSON.parse( + JSON.stringify(incremental) + ) as FormattedSubsequentIncrementalExecutionResult; + } + } else { + return result; + } +} + +describe("graphql-js test cases", () => { + // These test cases mirror defer tests of the `graphql-js` v17.0.0-alpha.9 release: + // https://github.com/graphql/graphql-js/blob/3283f8adf52e77a47f148ff2f30185c8d11ff0f0/src/execution/__tests__/defer-test.ts + + it("Can defer fragments containing scalar types", async () => { + const query = gql` + query HeroNameQuery { + hero { + id + ...NameFragment @defer + } + } + fragment NameFragment on Hero { + name + } + `; + const incoming = run(query); + + expectJSON(incoming).toDeepEqual([ + { + data: { + hero: { + id: "1", + }, + }, + pending: [{ id: "0", path: 
["hero"] }], + hasNext: true, + }, + { + incremental: [ + { + data: { + name: "Luke", + }, + id: "0", + }, + ], + completed: [{ id: "0" }], + hasNext: false, + }, + ]); + }); + it("Can disable defer using if argument", async () => { + const document = parse(` + query HeroNameQuery { + hero { + id + ...NameFragment @defer(if: false) + } + } + fragment NameFragment on Hero { + name + } + `); + const result = await run(document); + + expectJSON(result).toDeepEqual({ + data: { + hero: { + id: "1", + name: "Luke", + }, + }, + }); + }); + it("Does not disable defer with null if argument", async () => { + const document = parse(` + query HeroNameQuery($shouldDefer: Boolean) { + hero { + id + ...NameFragment @defer(if: $shouldDefer) + } + } + fragment NameFragment on Hero { + name + } + `); + const result = await run(document); + expectJSON(result).toDeepEqual([ + { + data: { hero: { id: "1" } }, + pending: [{ id: "0", path: ["hero"] }], + hasNext: true, + }, + { + incremental: [ + { + data: { name: "Luke" }, + id: "0", + }, + ], + completed: [{ id: "0" }], + hasNext: false, + }, + ]); + }); + it("Does not execute deferred fragments early when not specified", async () => { + const document = parse(` + query HeroNameQuery { + hero { + id + ...NameFragment @defer + } + } + fragment NameFragment on Hero { + name + } + `); + const order: Array = []; + const result = await run(document, { + hero: { + ...hero, + id: async () => { + await resolveOnNextTick(); + await resolveOnNextTick(); + order.push("slow-id"); + return hero.id; + }, + name: () => { + order.push("fast-name"); + return hero.name; + }, + }, + }); + + expectJSON(result).toDeepEqual([ + { + data: { + hero: { + id: "1", + }, + }, + pending: [{ id: "0", path: ["hero"] }], + hasNext: true, + }, + { + incremental: [ + { + data: { + name: "Luke", + }, + id: "0", + }, + ], + completed: [{ id: "0" }], + hasNext: false, + }, + ]); + expect(order).to.deep.equal(["slow-id", "fast-name"]); + }); + it("Does execute deferred fragments early when specified", async () => { + const document = parse(` + query HeroNameQuery { + hero { + id + ...NameFragment @defer + } + } + fragment NameFragment on Hero { + name + } + `); + const order: Array = []; + const result = await run( + document, + { + hero: { + ...hero, + id: async () => { + await resolveOnNextTick(); + await resolveOnNextTick(); + order.push("slow-id"); + return hero.id; + }, + name: () => { + order.push("fast-name"); + return hero.name; + }, + }, + }, + true + ); + + expectJSON(result).toDeepEqual([ + { + data: { + hero: { + id: "1", + }, + }, + pending: [{ id: "0", path: ["hero"] }], + hasNext: true, + }, + { + incremental: [ + { + data: { + name: "Luke", + }, + id: "0", + }, + ], + completed: [{ id: "0" }], + hasNext: false, + }, + ]); + expect(order).to.deep.equal(["fast-name", "slow-id"]); + }); + it("Can defer fragments on the top level Query field", async () => { + const document = parse(` + query HeroNameQuery { + ...QueryFragment @defer(label: "DeferQuery") + } + fragment QueryFragment on Query { + hero { + id + } + } + `); + const result = await run(document); + + expectJSON(result).toDeepEqual([ + { + data: {}, + pending: [{ id: "0", path: [], label: "DeferQuery" }], + hasNext: true, + }, + { + incremental: [ + { + data: { + hero: { + id: "1", + }, + }, + id: "0", + }, + ], + completed: [{ id: "0" }], + hasNext: false, + }, + ]); + }); + it("Can defer fragments with errors on the top level Query field", async () => { + const document = parse(` + query HeroNameQuery { + ...QueryFragment 
@defer(label: "DeferQuery") + } + fragment QueryFragment on Query { + hero { + name + } + } + `); + const result = await run(document, { + hero: { + ...hero, + name: () => { + throw new Error("bad"); + }, + }, + }); + + expectJSON(result).toDeepEqual([ + { + data: {}, + pending: [{ id: "0", path: [], label: "DeferQuery" }], + hasNext: true, + }, + { + incremental: [ + { + data: { + hero: { + name: null, + }, + }, + errors: [ + { + message: "bad", + locations: [{ line: 7, column: 11 }], + path: ["hero", "name"], + }, + ], + id: "0", + }, + ], + completed: [{ id: "0" }], + hasNext: false, + }, + ]); + }); + it("Can defer a fragment within an already deferred fragment", async () => { + const document = parse(` + query HeroNameQuery { + hero { + ...TopFragment @defer(label: "DeferTop") + } + } + fragment TopFragment on Hero { + id + ...NestedFragment @defer(label: "DeferNested") + } + fragment NestedFragment on Hero { + friends { + name + } + } + `); + const result = await run(document); + + expectJSON(result).toDeepEqual([ + { + data: { + hero: {}, + }, + pending: [{ id: "0", path: ["hero"], label: "DeferTop" }], + hasNext: true, + }, + { + pending: [{ id: "1", path: ["hero"], label: "DeferNested" }], + incremental: [ + { + data: { + id: "1", + }, + id: "0", + }, + { + data: { + friends: [{ name: "Han" }, { name: "Leia" }, { name: "C-3PO" }], + }, + id: "1", + }, + ], + completed: [{ id: "0" }, { id: "1" }], + hasNext: false, + }, + ]); + }); + it("Can defer a fragment that is also not deferred, deferred fragment is first", async () => { + const document = parse(` + query HeroNameQuery { + hero { + ...TopFragment @defer(label: "DeferTop") + ...TopFragment + } + } + fragment TopFragment on Hero { + name + } + `); + const result = await run(document); + expectJSON(result).toDeepEqual({ + data: { + hero: { + name: "Luke", + }, + }, + }); + }); + it("Can defer a fragment that is also not deferred, non-deferred fragment is first", async () => { + const document = parse(` + query HeroNameQuery { + hero { + ...TopFragment + ...TopFragment @defer(label: "DeferTop") + } + } + fragment TopFragment on Hero { + name + } + `); + const result = await run(document); + expectJSON(result).toDeepEqual({ + data: { + hero: { + name: "Luke", + }, + }, + }); + }); + + it("Can defer an inline fragment", async () => { + const document = parse(` + query HeroNameQuery { + hero { + id + ... on Hero @defer(label: "InlineDeferred") { + name + } + } + } + `); + const result = await run(document); + + expectJSON(result).toDeepEqual([ + { + data: { hero: { id: "1" } }, + pending: [{ id: "0", path: ["hero"], label: "InlineDeferred" }], + hasNext: true, + }, + { + incremental: [{ data: { name: "Luke" }, id: "0" }], + completed: [{ id: "0" }], + hasNext: false, + }, + ]); + }); + + it("Does not emit empty defer fragments", async () => { + const document = parse(` + query HeroNameQuery { + hero { + ... @defer { + name @skip(if: true) + } + } + } + fragment TopFragment on Hero { + name + } + `); + const result = await run(document); + expectJSON(result).toDeepEqual({ + data: { + hero: {}, + }, + }); + }); + + it("Emits children of empty defer fragments", async () => { + const document = parse(` + query HeroNameQuery { + hero { + ... @defer { + ... 
@defer { + name + } + } + } + } + `); + const result = await run(document); + expectJSON(result).toDeepEqual([ + { + data: { + hero: {}, + }, + pending: [{ id: "0", path: ["hero"] }], + hasNext: true, + }, + { + incremental: [{ data: { name: "Luke" }, id: "0" }], + completed: [{ id: "0" }], + hasNext: false, + }, + ]); + }); + + it("Can separately emit defer fragments with different labels with varying fields", async () => { + const document = parse(` + query HeroNameQuery { + hero { + ... @defer(label: "DeferID") { + id + } + ... @defer(label: "DeferName") { + name + } + } + } + `); + const result = await run(document); + expectJSON(result).toDeepEqual([ + { + data: { + hero: {}, + }, + pending: [ + { id: "0", path: ["hero"], label: "DeferID" }, + { id: "1", path: ["hero"], label: "DeferName" }, + ], + hasNext: true, + }, + { + incremental: [ + { + data: { + id: "1", + }, + id: "0", + }, + { + data: { + name: "Luke", + }, + id: "1", + }, + ], + completed: [{ id: "0" }, { id: "1" }], + hasNext: false, + }, + ]); + }); + + it("Separately emits defer fragments with different labels with varying subfields", async () => { + const document = parse(` + query HeroNameQuery { + ... @defer(label: "DeferID") { + hero { + id + } + } + ... @defer(label: "DeferName") { + hero { + name + } + } + } + `); + const result = await run(document); + expectJSON(result).toDeepEqual([ + { + data: {}, + pending: [ + { id: "0", path: [], label: "DeferID" }, + { id: "1", path: [], label: "DeferName" }, + ], + hasNext: true, + }, + { + incremental: [ + { + data: { hero: {} }, + id: "0", + }, + { + data: { id: "1" }, + id: "0", + subPath: ["hero"], + }, + { + data: { name: "Luke" }, + id: "1", + subPath: ["hero"], + }, + ], + completed: [{ id: "0" }, { id: "1" }], + hasNext: false, + }, + ]); + }); + + it("Separately emits defer fragments with different labels with varying subfields that return promises", async () => { + const document = parse(` + query HeroNameQuery { + ... @defer(label: "DeferID") { + hero { + id + } + } + ... @defer(label: "DeferName") { + hero { + name + } + } + } + `); + const result = await run(document, { + hero: { + id: () => Promise.resolve("1"), + name: () => Promise.resolve("Luke"), + }, + }); + expectJSON(result).toDeepEqual([ + { + data: {}, + pending: [ + { id: "0", path: [], label: "DeferID" }, + { id: "1", path: [], label: "DeferName" }, + ], + hasNext: true, + }, + { + incremental: [ + { + data: { hero: {} }, + id: "0", + }, + { + data: { id: "1" }, + id: "0", + subPath: ["hero"], + }, + { + data: { name: "Luke" }, + id: "1", + subPath: ["hero"], + }, + ], + completed: [{ id: "0" }, { id: "1" }], + hasNext: false, + }, + ]); + }); + + it("Separately emits defer fragments with varying subfields of same priorities but different level of defers", async () => { + const document = parse(` + query HeroNameQuery { + hero { + ... @defer(label: "DeferID") { + id + } + } + ... 
@defer(label: "DeferName") { + hero { + name + } + } + } + `); + const result = await run(document); + expectJSON(result).toDeepEqual([ + { + data: { + hero: {}, + }, + pending: [ + { id: "0", path: ["hero"], label: "DeferID" }, + { id: "1", path: [], label: "DeferName" }, + ], + hasNext: true, + }, + { + incremental: [ + { + data: { + id: "1", + }, + id: "0", + }, + { + data: { + name: "Luke", + }, + id: "1", + subPath: ["hero"], + }, + ], + completed: [{ id: "0" }, { id: "1" }], + hasNext: false, + }, + ]); + }); + + it("Separately emits nested defer fragments with varying subfields of same priorities but different level of defers", async () => { + const document = parse(` + query HeroNameQuery { + ... @defer(label: "DeferName") { + hero { + name + ... @defer(label: "DeferID") { + id + } + } + } + } + `); + const result = await run(document); + expectJSON(result).toDeepEqual([ + { + data: {}, + pending: [{ id: "0", path: [], label: "DeferName" }], + hasNext: true, + }, + { + pending: [{ id: "1", path: ["hero"], label: "DeferID" }], + incremental: [ + { + data: { + hero: { + name: "Luke", + }, + }, + id: "0", + }, + { + data: { + id: "1", + }, + id: "1", + }, + ], + completed: [{ id: "0" }, { id: "1" }], + hasNext: false, + }, + ]); + }); + + it("Initiates deferred grouped field sets only if they have been released as pending", async () => { + const document = parse(` + query { + ... @defer { + a { + ... @defer { + b { + c { d } + } + } + } + } + ... @defer { + a { + someField + ... @defer { + b { + e { f } + } + } + } + } + } + `); + + const { promise: slowFieldPromise, resolve: resolveSlowField } = + promiseWithResolvers(); + let cResolverCalled = false; + let eResolverCalled = false; + const executeResult = experimentalExecuteIncrementally({ + schema, + document, + rootValue: { + a: { + someField: slowFieldPromise, + b: { + c: () => { + cResolverCalled = true; + return { d: "d" }; + }, + e: () => { + eResolverCalled = true; + return { f: "f" }; + }, + }, + }, + }, + enableEarlyExecution: false, + }); + + assert("initialResult" in executeResult); + + const result1 = executeResult.initialResult; + expectJSON(result1).toDeepEqual({ + data: {}, + pending: [ + { id: "0", path: [] }, + { id: "1", path: [] }, + ], + hasNext: true, + }); + + const iterator = executeResult.subsequentResults[Symbol.asyncIterator](); + + expect(cResolverCalled).to.equal(false); + expect(eResolverCalled).to.equal(false); + + const result2 = await iterator.next(); + expectJSON(result2).toDeepEqual({ + value: { + pending: [{ id: "2", path: ["a"] }], + incremental: [ + { + data: { a: {} }, + id: "0", + }, + { + data: { b: {} }, + id: "2", + }, + { + data: { c: { d: "d" } }, + id: "2", + subPath: ["b"], + }, + ], + completed: [{ id: "0" }, { id: "2" }], + hasNext: true, + }, + done: false, + }); + + expect(cResolverCalled).to.equal(true); + expect(eResolverCalled).to.equal(false); + + resolveSlowField("someField"); + + const result3 = await iterator.next(); + expectJSON(result3).toDeepEqual({ + value: { + pending: [{ id: "3", path: ["a"] }], + incremental: [ + { + data: { someField: "someField" }, + id: "1", + subPath: ["a"], + }, + { + data: { e: { f: "f" } }, + id: "3", + subPath: ["b"], + }, + ], + completed: [{ id: "1" }, { id: "3" }], + hasNext: false, + }, + done: false, + }); + + expect(eResolverCalled).to.equal(true); + + const result4 = await iterator.next(); + expectJSON(result4).toDeepEqual({ + value: undefined, + done: true, + }); + }); + + it("Initiates unique deferred grouped field sets after those that 
are common to sibling defers", async () => { + const document = parse(` + query { + ... @defer { + a { + ... @defer { + b { + c { d } + } + } + } + } + ... @defer { + a { + ... @defer { + b { + c { d } + e { f } + } + } + } + } + } + `); + + const { promise: cPromise, resolve: resolveC } = + promiseWithResolvers(); + let cResolverCalled = false; + let eResolverCalled = false; + const executeResult = experimentalExecuteIncrementally({ + schema, + document, + rootValue: { + a: { + b: { + c: async () => { + cResolverCalled = true; + await cPromise; + return { d: "d" }; + }, + e: () => { + eResolverCalled = true; + return { f: "f" }; + }, + }, + }, + }, + enableEarlyExecution: false, + }); + + assert("initialResult" in executeResult); + + const result1 = executeResult.initialResult; + expectJSON(result1).toDeepEqual({ + data: {}, + pending: [ + { id: "0", path: [] }, + { id: "1", path: [] }, + ], + hasNext: true, + }); + + const iterator = executeResult.subsequentResults[Symbol.asyncIterator](); + + expect(cResolverCalled).to.equal(false); + expect(eResolverCalled).to.equal(false); + + const result2 = await iterator.next(); + expectJSON(result2).toDeepEqual({ + value: { + pending: [ + { id: "2", path: ["a"] }, + { id: "3", path: ["a"] }, + ], + incremental: [ + { + data: { a: {} }, + id: "0", + }, + ], + completed: [{ id: "0" }, { id: "1" }], + hasNext: true, + }, + done: false, + }); + + resolveC(); + + expect(cResolverCalled).to.equal(true); + expect(eResolverCalled).to.equal(false); + + const result3 = await iterator.next(); + expectJSON(result3).toDeepEqual({ + value: { + incremental: [ + { + data: { b: { c: { d: "d" } } }, + id: "2", + }, + { + data: { e: { f: "f" } }, + id: "3", + subPath: ["b"], + }, + ], + completed: [{ id: "2" }, { id: "3" }], + hasNext: false, + }, + done: false, + }); + + const result4 = await iterator.next(); + expectJSON(result4).toDeepEqual({ + value: undefined, + done: true, + }); + }); + + it("Can deduplicate multiple defers on the same object", async () => { + const document = parse(` + query { + hero { + friends { + ... @defer { + ...FriendFrag + ... @defer { + ...FriendFrag + ... @defer { + ...FriendFrag + ... @defer { + ...FriendFrag + } + } + } + } + } + } + } + + fragment FriendFrag on Friend { + id + name + } + `); + const result = await run(document); + + expectJSON(result).toDeepEqual([ + { + data: { hero: { friends: [{}, {}, {}] } }, + pending: [ + { id: "0", path: ["hero", "friends", 0] }, + { id: "1", path: ["hero", "friends", 1] }, + { id: "2", path: ["hero", "friends", 2] }, + ], + hasNext: true, + }, + { + incremental: [ + { data: { id: "2", name: "Han" }, id: "0" }, + { data: { id: "3", name: "Leia" }, id: "1" }, + { data: { id: "4", name: "C-3PO" }, id: "2" }, + ], + completed: [{ id: "0" }, { id: "1" }, { id: "2" }], + hasNext: false, + }, + ]); + }); + + it("Deduplicates fields present in the initial payload", async () => { + const document = parse(` + query { + hero { + nestedObject { + deeperObject { + foo + } + } + anotherNestedObject { + deeperObject { + foo + } + } + ... 
@defer { + nestedObject { + deeperObject { + bar + } + } + anotherNestedObject { + deeperObject { + foo + } + } + } + } + } + `); + const result = await run(document, { + hero: { + nestedObject: { deeperObject: { foo: "foo", bar: "bar" } }, + anotherNestedObject: { deeperObject: { foo: "foo" } }, + }, + }); + expectJSON(result).toDeepEqual([ + { + data: { + hero: { + nestedObject: { + deeperObject: { + foo: "foo", + }, + }, + anotherNestedObject: { + deeperObject: { + foo: "foo", + }, + }, + }, + }, + pending: [{ id: "0", path: ["hero"] }], + hasNext: true, + }, + { + incremental: [ + { + data: { bar: "bar" }, + id: "0", + subPath: ["nestedObject", "deeperObject"], + }, + ], + completed: [{ id: "0" }], + hasNext: false, + }, + ]); + }); + + it("Deduplicates fields present in a parent defer payload", async () => { + const document = parse(` + query { + hero { + ... @defer { + nestedObject { + deeperObject { + foo + ... @defer { + foo + bar + } + } + } + } + } + } + `); + const result = await run(document, { + hero: { nestedObject: { deeperObject: { foo: "foo", bar: "bar" } } }, + }); + expectJSON(result).toDeepEqual([ + { + data: { + hero: {}, + }, + pending: [{ id: "0", path: ["hero"] }], + hasNext: true, + }, + { + pending: [{ id: "1", path: ["hero", "nestedObject", "deeperObject"] }], + incremental: [ + { + data: { + nestedObject: { + deeperObject: { foo: "foo" }, + }, + }, + id: "0", + }, + { + data: { + bar: "bar", + }, + id: "1", + }, + ], + completed: [{ id: "0" }, { id: "1" }], + hasNext: false, + }, + ]); + }); + + it("Deduplicates fields with deferred fragments at multiple levels", async () => { + const document = parse(` + query { + hero { + nestedObject { + deeperObject { + foo + } + } + ... @defer { + nestedObject { + deeperObject { + foo + bar + } + ... @defer { + deeperObject { + foo + bar + baz + ... @defer { + foo + bar + baz + bak + } + } + } + } + } + } + } + `); + const result = await run(document, { + hero: { + nestedObject: { + deeperObject: { foo: "foo", bar: "bar", baz: "baz", bak: "bak" }, + }, + }, + }); + expectJSON(result).toDeepEqual([ + { + data: { + hero: { + nestedObject: { + deeperObject: { + foo: "foo", + }, + }, + }, + }, + pending: [{ id: "0", path: ["hero"] }], + hasNext: true, + }, + { + pending: [ + { id: "1", path: ["hero", "nestedObject"] }, + { id: "2", path: ["hero", "nestedObject", "deeperObject"] }, + ], + incremental: [ + { + data: { bar: "bar" }, + id: "0", + subPath: ["nestedObject", "deeperObject"], + }, + { + data: { baz: "baz" }, + id: "1", + subPath: ["deeperObject"], + }, + { + data: { bak: "bak" }, + id: "2", + }, + ], + completed: [{ id: "0" }, { id: "1" }, { id: "2" }], + hasNext: false, + }, + ]); + }); + + it("Deduplicates multiple fields from deferred fragments from different branches occurring at the same level", async () => { + const document = parse(` + query { + hero { + nestedObject { + deeperObject { + ... @defer { + foo + } + } + } + ... @defer { + nestedObject { + deeperObject { + ... 
@defer { + foo + bar + } + } + } + } + } + } + `); + const result = await run(document, { + hero: { nestedObject: { deeperObject: { foo: "foo", bar: "bar" } } }, + }); + expectJSON(result).toDeepEqual([ + { + data: { + hero: { + nestedObject: { + deeperObject: {}, + }, + }, + }, + pending: [ + { id: "0", path: ["hero", "nestedObject", "deeperObject"] }, + { id: "1", path: ["hero", "nestedObject", "deeperObject"] }, + ], + hasNext: true, + }, + { + incremental: [ + { + data: { + foo: "foo", + }, + id: "0", + }, + { + data: { + bar: "bar", + }, + id: "1", + }, + ], + completed: [{ id: "0" }, { id: "1" }], + hasNext: false, + }, + ]); + }); + + it("Deduplicate fields with deferred fragments in different branches at multiple non-overlapping levels", async () => { + const document = parse(` + query { + a { + b { + c { + d + } + ... @defer { + e { + f + } + } + } + } + ... @defer { + a { + b { + e { + f + } + } + } + g { + h + } + } + } + `); + const result = await run(document, { + a: { + b: { + c: { d: "d" }, + e: { f: "f" }, + }, + }, + g: { h: "h" }, + }); + expectJSON(result).toDeepEqual([ + { + data: { + a: { + b: { + c: { + d: "d", + }, + }, + }, + }, + pending: [ + { id: "0", path: ["a", "b"] }, + { id: "1", path: [] }, + ], + hasNext: true, + }, + { + incremental: [ + { + data: { e: { f: "f" } }, + id: "0", + }, + { + data: { g: { h: "h" } }, + id: "1", + }, + ], + completed: [{ id: "0" }, { id: "1" }], + hasNext: false, + }, + ]); + }); + + it("Correctly bundles varying subfields into incremental data records unique by defer combination, ignoring fields in a fragment masked by a parent defer", async () => { + const document = parse(` + query HeroNameQuery { + ... @defer { + hero { + id + } + } + ... @defer { + hero { + name + shouldBeWithNameDespiteAdditionalDefer: name + ... @defer { + shouldBeWithNameDespiteAdditionalDefer: name + } + } + } + } + `); + const result = await run(document); + expectJSON(result).toDeepEqual([ + { + data: {}, + pending: [ + { id: "0", path: [] }, + { id: "1", path: [] }, + ], + hasNext: true, + }, + { + incremental: [ + { + data: { hero: {} }, + id: "0", + }, + { + data: { id: "1" }, + id: "0", + subPath: ["hero"], + }, + { + data: { + name: "Luke", + shouldBeWithNameDespiteAdditionalDefer: "Luke", + }, + id: "1", + subPath: ["hero"], + }, + ], + completed: [{ id: "0" }, { id: "1" }], + hasNext: false, + }, + ]); + }); + + it("Nulls cross defer boundaries, null first", async () => { + const document = parse(` + query { + ... @defer { + a { + someField + b { + c { + nonNullErrorField + } + } + } + } + a { + ... @defer { + b { + c { + d + } + } + } + } + } + `); + const result = await run(document, { + a: { b: { c: { d: "d" } }, someField: "someField" }, + }); + expectJSON(result).toDeepEqual([ + { + data: { + a: {}, + }, + pending: [ + { id: "0", path: [] }, + { id: "1", path: ["a"] }, + ], + hasNext: true, + }, + { + incremental: [ + { + data: { b: { c: {} } }, + id: "1", + }, + { + data: { d: "d" }, + id: "1", + subPath: ["b", "c"], + }, + ], + completed: [ + { + id: "0", + errors: [ + { + message: + "Cannot return null for non-nullable field c.nonNullErrorField.", + locations: [{ line: 8, column: 17 }], + path: ["a", "b", "c", "nonNullErrorField"], + }, + ], + }, + { id: "1" }, + ], + hasNext: false, + }, + ]); + }); + + it("Nulls cross defer boundaries, value first", async () => { + const document = parse(` + query { + ... @defer { + a { + b { + c { + d + } + } + } + } + a { + ... 
@defer { + someField + b { + c { + nonNullErrorField + } + } + } + } + } + `); + const result = await run(document, { + a: { + b: { c: { d: "d" }, nonNullErrorFIeld: null }, + someField: "someField", + }, + }); + expectJSON(result).toDeepEqual([ + { + data: { + a: {}, + }, + pending: [ + { id: "0", path: [] }, + { id: "1", path: ["a"] }, + ], + hasNext: true, + }, + { + incremental: [ + { + data: { b: { c: {} } }, + id: "1", + }, + { + data: { d: "d" }, + id: "0", + subPath: ["a", "b", "c"], + }, + ], + completed: [ + { id: "0" }, + { + id: "1", + errors: [ + { + message: + "Cannot return null for non-nullable field c.nonNullErrorField.", + locations: [{ line: 17, column: 17 }], + path: ["a", "b", "c", "nonNullErrorField"], + }, + ], + }, + ], + hasNext: false, + }, + ]); + }); + + it("Handles multiple erroring deferred grouped field sets", async () => { + const document = parse(` + query { + ... @defer { + a { + b { + c { + someError: nonNullErrorField + } + } + } + } + ... @defer { + a { + b { + c { + anotherError: nonNullErrorField + } + } + } + } + } + `); + const result = await run(document, { + a: { + b: { c: { nonNullErrorField: null } }, + }, + }); + expectJSON(result).toDeepEqual([ + { + data: {}, + pending: [ + { id: "0", path: [] }, + { id: "1", path: [] }, + ], + hasNext: true, + }, + { + completed: [ + { + id: "0", + errors: [ + { + message: + "Cannot return null for non-nullable field c.nonNullErrorField.", + locations: [{ line: 7, column: 17 }], + path: ["a", "b", "c", "someError"], + }, + ], + }, + { + id: "1", + errors: [ + { + message: + "Cannot return null for non-nullable field c.nonNullErrorField.", + locations: [{ line: 16, column: 17 }], + path: ["a", "b", "c", "anotherError"], + }, + ], + }, + ], + hasNext: false, + }, + ]); + }); + + it("Handles multiple erroring deferred grouped field sets for the same fragment", async () => { + const document = parse(` + query { + ... @defer { + a { + b { + someC: c { + d: d + } + anotherC: c { + d: d + } + } + } + } + ... @defer { + a { + b { + someC: c { + someError: nonNullErrorField + } + anotherC: c { + anotherError: nonNullErrorField + } + } + } + } + } + `); + const result = await run(document, { + a: { + b: { c: { d: "d", nonNullErrorField: null } }, + }, + }); + expectJSON(result).toDeepEqual([ + { + data: {}, + pending: [ + { id: "0", path: [] }, + { id: "1", path: [] }, + ], + hasNext: true, + }, + { + incremental: [ + { + data: { a: { b: { someC: {}, anotherC: {} } } }, + id: "0", + }, + { + data: { d: "d" }, + id: "0", + subPath: ["a", "b", "someC"], + }, + { + data: { d: "d" }, + id: "0", + subPath: ["a", "b", "anotherC"], + }, + ], + completed: [ + { + id: "1", + errors: [ + { + message: + "Cannot return null for non-nullable field c.nonNullErrorField.", + locations: [{ line: 19, column: 17 }], + path: ["a", "b", "someC", "someError"], + }, + ], + }, + { id: "0" }, + ], + hasNext: false, + }, + ]); + }); + + it("filters a payload with a null that cannot be merged", async () => { + const document = parse(` + query { + ... @defer { + a { + someField + b { + c { + nonNullErrorField + } + } + } + } + a { + ... 
@defer { + b { + c { + d + } + } + } + } + } + `); + const result = await run( + document, + { + a: { + b: { + c: { + d: "d", + nonNullErrorField: async () => { + await resolveOnNextTick(); + return null; + }, + }, + }, + someField: "someField", + }, + }, + true + ); + expectJSON(result).toDeepEqual([ + { + data: { + a: {}, + }, + pending: [ + { id: "0", path: [] }, + { id: "1", path: ["a"] }, + ], + hasNext: true, + }, + { + incremental: [ + { + data: { b: { c: {} } }, + id: "1", + }, + { + data: { d: "d" }, + id: "1", + subPath: ["b", "c"], + }, + ], + completed: [{ id: "1" }], + hasNext: true, + }, + { + completed: [ + { + id: "0", + errors: [ + { + message: + "Cannot return null for non-nullable field c.nonNullErrorField.", + locations: [{ line: 8, column: 17 }], + path: ["a", "b", "c", "nonNullErrorField"], + }, + ], + }, + ], + hasNext: false, + }, + ]); + }); + + it("Cancels deferred fields when initial result exhibits null bubbling", async () => { + const document = parse(` + query { + hero { + nonNullName + } + ... @defer { + hero { + name + } + } + } + `); + const result = await run( + document, + { + hero: { + ...hero, + nonNullName: () => null, + }, + }, + true + ); + expectJSON(result).toDeepEqual({ + data: { + hero: null, + }, + errors: [ + { + message: + "Cannot return null for non-nullable field Hero.nonNullName.", + locations: [{ line: 4, column: 11 }], + path: ["hero", "nonNullName"], + }, + ], + }); + }); + + it("Cancels deferred fields when deferred result exhibits null bubbling", async () => { + const document = parse(` + query { + ... @defer { + hero { + nonNullName + name + } + } + } + `); + const result = await run( + document, + { + hero: { + ...hero, + nonNullName: () => null, + }, + }, + true + ); + expectJSON(result).toDeepEqual([ + { + data: {}, + pending: [{ id: "0", path: [] }], + hasNext: true, + }, + { + incremental: [ + { + data: { + hero: null, + }, + errors: [ + { + message: + "Cannot return null for non-nullable field Hero.nonNullName.", + locations: [{ line: 5, column: 13 }], + path: ["hero", "nonNullName"], + }, + ], + id: "0", + }, + ], + completed: [{ id: "0" }], + hasNext: false, + }, + ]); + }); + + it("Deduplicates list fields", async () => { + const document = parse(` + query { + hero { + friends { + name + } + ... @defer { + friends { + name + } + } + } + } + `); + const result = await run(document); + expectJSON(result).toDeepEqual({ + data: { + hero: { + friends: [{ name: "Han" }, { name: "Leia" }, { name: "C-3PO" }], + }, + }, + }); + }); + + it("Deduplicates async iterable list fields", async () => { + const document = parse(` + query { + hero { + friends { + name + } + ... @defer { + friends { + name + } + } + } + } + `); + const result = await run(document, { + hero: { + ...hero, + friends: async function* resolve() { + yield await Promise.resolve(friends[0]); + }, + }, + }); + expectJSON(result).toDeepEqual({ + data: { hero: { friends: [{ name: "Han" }] } }, + }); + }); + + it("Deduplicates empty async iterable list fields", async () => { + const document = parse(` + query { + hero { + friends { + name + } + ... 
@defer { + friends { + name + } + } + } + } + `); + const result = await run(document, { + hero: { + ...hero, + + friends: async function* resolve() { + await resolveOnNextTick(); + }, + }, + }); + expectJSON(result).toDeepEqual({ + data: { hero: { friends: [] } }, + }); + }); + + it("Does not deduplicate list fields with non-overlapping fields", async () => { + const document = parse(` + query { + hero { + friends { + name + } + ... @defer { + friends { + id + } + } + } + } + `); + const result = await run(document); + expectJSON(result).toDeepEqual([ + { + data: { + hero: { + friends: [{ name: "Han" }, { name: "Leia" }, { name: "C-3PO" }], + }, + }, + pending: [{ id: "0", path: ["hero"] }], + hasNext: true, + }, + { + incremental: [ + { + data: { id: "2" }, + id: "0", + subPath: ["friends", 0], + }, + { + data: { id: "3" }, + id: "0", + subPath: ["friends", 1], + }, + { + data: { id: "4" }, + id: "0", + subPath: ["friends", 2], + }, + ], + completed: [{ id: "0" }], + hasNext: false, + }, + ]); + }); + + it("Deduplicates list fields that return empty lists", async () => { + const document = parse(` + query { + hero { + friends { + name + } + ... @defer { + friends { + name + } + } + } + } + `); + const result = await run(document, { + hero: { + ...hero, + friends: () => [], + }, + }); + expectJSON(result).toDeepEqual({ + data: { hero: { friends: [] } }, + }); + }); + + it("Deduplicates null object fields", async () => { + const document = parse(` + query { + hero { + nestedObject { + name + } + ... @defer { + nestedObject { + name + } + } + } + } + `); + const result = await run(document, { + hero: { + ...hero, + nestedObject: () => null, + }, + }); + expectJSON(result).toDeepEqual({ + data: { hero: { nestedObject: null } }, + }); + }); + + it("Deduplicates promise object fields", async () => { + const document = parse(` + query { + hero { + nestedObject { + name + } + ... 
@defer { + nestedObject { + name + } + } + } + } + `); + const result = await run(document, { + hero: { + nestedObject: () => Promise.resolve({ name: "foo" }), + }, + }); + expectJSON(result).toDeepEqual({ + data: { hero: { nestedObject: { name: "foo" } } }, + }); + }); + + it("Handles errors thrown in deferred fragments", async () => { + const document = parse(` + query HeroNameQuery { + hero { + id + ...NameFragment @defer + } + } + fragment NameFragment on Hero { + name + } + `); + const result = await run(document, { + hero: { + ...hero, + name: () => { + throw new Error("bad"); + }, + }, + }); + expectJSON(result).toDeepEqual([ + { + data: { hero: { id: "1" } }, + pending: [{ id: "0", path: ["hero"] }], + hasNext: true, + }, + { + incremental: [ + { + data: { name: null }, + id: "0", + errors: [ + { + message: "bad", + locations: [{ line: 9, column: 9 }], + path: ["hero", "name"], + }, + ], + }, + ], + completed: [{ id: "0" }], + hasNext: false, + }, + ]); + }); + it("Handles non-nullable errors thrown in deferred fragments", async () => { + const document = parse(` + query HeroNameQuery { + hero { + id + ...NameFragment @defer + } + } + fragment NameFragment on Hero { + nonNullName + } + `); + const result = await run(document, { + hero: { + ...hero, + nonNullName: () => null, + }, + }); + expectJSON(result).toDeepEqual([ + { + data: { hero: { id: "1" } }, + pending: [{ id: "0", path: ["hero"] }], + hasNext: true, + }, + { + completed: [ + { + id: "0", + errors: [ + { + message: + "Cannot return null for non-nullable field Hero.nonNullName.", + locations: [{ line: 9, column: 9 }], + path: ["hero", "nonNullName"], + }, + ], + }, + ], + hasNext: false, + }, + ]); + }); + it("Handles non-nullable errors thrown outside deferred fragments", async () => { + const document = parse(` + query HeroNameQuery { + hero { + nonNullName + ...NameFragment @defer + } + } + fragment NameFragment on Hero { + id + } + `); + const result = await run(document, { + hero: { + ...hero, + nonNullName: () => null, + }, + }); + expectJSON(result).toDeepEqual({ + errors: [ + { + message: + "Cannot return null for non-nullable field Hero.nonNullName.", + locations: [ + { + line: 4, + column: 11, + }, + ], + path: ["hero", "nonNullName"], + }, + ], + data: { + hero: null, + }, + }); + }); + it("Handles async non-nullable errors thrown in deferred fragments", async () => { + const document = parse(` + query HeroNameQuery { + hero { + id + ...NameFragment @defer + } + } + fragment NameFragment on Hero { + nonNullName + } + `); + const result = await run(document, { + hero: { + ...hero, + nonNullName: () => Promise.resolve(null), + }, + }); + expectJSON(result).toDeepEqual([ + { + data: { hero: { id: "1" } }, + pending: [{ id: "0", path: ["hero"] }], + hasNext: true, + }, + { + completed: [ + { + id: "0", + errors: [ + { + message: + "Cannot return null for non-nullable field Hero.nonNullName.", + locations: [{ line: 9, column: 9 }], + path: ["hero", "nonNullName"], + }, + ], + }, + ], + hasNext: false, + }, + ]); + }); + it("Returns payloads in correct order", async () => { + const document = parse(` + query HeroNameQuery { + hero { + id + ...NameFragment @defer + } + } + fragment NameFragment on Hero { + name + friends { + ...NestedFragment @defer + } + } + fragment NestedFragment on Friend { + name + } + `); + const result = await run(document, { + hero: { + ...hero, + name: async () => { + await resolveOnNextTick(); + return "slow"; + }, + }, + }); + expectJSON(result).toDeepEqual([ + { + data: { + hero: { id: 
"1" }, + }, + pending: [{ id: "0", path: ["hero"] }], + hasNext: true, + }, + { + pending: [ + { id: "1", path: ["hero", "friends", 0] }, + { id: "2", path: ["hero", "friends", 1] }, + { id: "3", path: ["hero", "friends", 2] }, + ], + incremental: [ + { + data: { name: "slow", friends: [{}, {}, {}] }, + id: "0", + }, + { data: { name: "Han" }, id: "1" }, + { data: { name: "Leia" }, id: "2" }, + { data: { name: "C-3PO" }, id: "3" }, + ], + completed: [{ id: "0" }, { id: "1" }, { id: "2" }, { id: "3" }], + hasNext: false, + }, + ]); + }); + it("Returns payloads from synchronous data in correct order", async () => { + const document = parse(` + query HeroNameQuery { + hero { + id + ...NameFragment @defer + } + } + fragment NameFragment on Hero { + name + friends { + ...NestedFragment @defer + } + } + fragment NestedFragment on Friend { + name + } + `); + const result = await run(document); + expectJSON(result).toDeepEqual([ + { + data: { + hero: { id: "1" }, + }, + pending: [{ id: "0", path: ["hero"] }], + hasNext: true, + }, + { + pending: [ + { id: "1", path: ["hero", "friends", 0] }, + { id: "2", path: ["hero", "friends", 1] }, + { id: "3", path: ["hero", "friends", 2] }, + ], + incremental: [ + { + data: { + name: "Luke", + friends: [{}, {}, {}], + }, + id: "0", + }, + { data: { name: "Han" }, id: "1" }, + { data: { name: "Leia" }, id: "2" }, + { data: { name: "C-3PO" }, id: "3" }, + ], + completed: [{ id: "0" }, { id: "1" }, { id: "2" }, { id: "3" }], + hasNext: false, + }, + ]); + }); + + it("Filters deferred payloads when a list item returned by an async iterable is nulled", async () => { + const document = parse(` + query { + hero { + friends { + nonNullName + ...NameFragment @defer + } + } + } + fragment NameFragment on Friend { + name + } + `); + const result = await run(document, { + hero: { + ...hero, + async *friends() { + yield await Promise.resolve({ + ...friends[0], + nonNullName: () => Promise.resolve(null), + }); + }, + }, + }); + expectJSON(result).toDeepEqual({ + data: { + hero: { + friends: [null], + }, + }, + errors: [ + { + message: + "Cannot return null for non-nullable field Friend.nonNullName.", + locations: [{ line: 5, column: 11 }], + path: ["hero", "friends", 0, "nonNullName"], + }, + ], + }); + }); + + it("original execute function throws error if anything is deferred and everything else is sync", () => { + const doc = ` + query Deferred { + ... @defer { hero { id } } + } + `; + expect(() => + execute({ + schema, + document: parse(doc), + rootValue: {}, + }) + ).to.throw( + "Executing this GraphQL operation would unexpectedly produce multiple payloads (due to @defer or @stream directive)" + ); + }); + + it("original execute function resolves to error if anything is deferred and something else is async", async () => { + const doc = ` + query Deferred { + hero { name } + ... 
@defer { hero { id } } + } + `; + await expectPromise( + execute({ + schema, + document: parse(doc), + rootValue: { + hero: { + ...hero, + name: async () => { + await resolveOnNextTick(); + return "slow"; + }, + }, + }, + }) + ).toRejectWith( + "Executing this GraphQL operation would unexpectedly produce multiple payloads (due to @defer or @stream directive)" + ); + }); +}); From 66d6c9be5dac12ab0e183757f6c1e88018a75ed7 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 00:01:08 -0600 Subject: [PATCH 004/254] Update the remaining tests to work with the handler --- .../__tests__/graphql17Alpha9.test.ts | 2785 ++++++++--------- 1 file changed, 1246 insertions(+), 1539 deletions(-) diff --git a/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts b/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts index 4764884965e..fe79c212075 100644 --- a/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts +++ b/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts @@ -15,7 +15,13 @@ import { GraphQLString, } from "graphql-17-alpha9"; -import { gql } from "@apollo/client"; +import { ApolloLink, gql, Observable } from "@apollo/client"; + +import { + GraphQL17Alpha9Handler, + hasIncrementalChunks, + // eslint-disable-next-line local-rules/no-relative-imports +} from "../graphql17Alpha9.js"; // This is the test setup of the `graphql-js` v17.0.0-alpha.9 release: // https://github.com/graphql/graphql-js/blob/3283f8adf52e77a47f148ff2f30185c8d11ff0f0/src/execution/__tests__/defer-test.ts @@ -133,6 +139,27 @@ const query = new GraphQLObjectType({ const schema = new GraphQLSchema({ query }); +function resolveOnNextTick(): Promise { + return Promise.resolve(undefined); +} + +type PromiseOrValue = Promise | T; + +function promiseWithResolvers(): { + promise: Promise; + resolve: (value: T | PromiseOrValue) => void; + reject: (reason?: any) => void; +} { + // these are assigned synchronously within the Promise constructor + let resolve!: (value: T | PromiseOrValue) => void; + let reject!: (reason?: any) => void; + const promise = new Promise((res, rej) => { + resolve = res; + reject = rej; + }); + return { promise, resolve, reject }; +} + async function* run( document: DocumentNode, rootValue: unknown = { hero }, @@ -163,6 +190,17 @@ async function* run( } } +const schemaLink = new ApolloLink((operation) => { + return new Observable((observer) => { + void (async () => { + for await (const chunk of run(operation.query)) { + observer.next(chunk); + } + observer.complete(); + })(); + }); +}); + describe("graphql-js test cases", () => { // These test cases mirror defer tests of the `graphql-js` v17.0.0-alpha.9 release: // https://github.com/graphql/graphql-js/blob/3283f8adf52e77a47f148ff2f30185c8d11ff0f0/src/execution/__tests__/defer-test.ts @@ -179,34 +217,48 @@ describe("graphql-js test cases", () => { name } `; + + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + const incoming = run(query); - expectJSON(incoming).toDeepEqual([ - { + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { hero: { id: "1", }, }, - pending: [{ id: "0", path: ["hero"] }], - hasNext: true, - }, - { - incremental: [ - { - data: { - name: "Luke", - }, - id: "0", + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await 
incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + hero: { + id: "1", + name: "Luke", }, - ], - completed: [{ id: "0" }], - hasNext: false, - }, - ]); + }, + }); + expect(request.hasNext).toBe(false); + } }); + it("Can disable defer using if argument", async () => { - const document = parse(` + const query = gql` query HeroNameQuery { hero { id @@ -216,163 +268,31 @@ describe("graphql-js test cases", () => { fragment NameFragment on Hero { name } - `); - const result = await run(document); + `; + const handler = new GraphQL17Alpha9Handler(); + const incoming = run(query); - expectJSON(result).toDeepEqual({ - data: { - hero: { - id: "1", - name: "Luke", - }, - }, - }); + const { value: chunk } = await incoming.next(); + + assert(chunk); + expect(handler.isIncrementalResult(chunk)).toBe(false); + expect(hasIncrementalChunks(chunk)).toBe(false); }); - it("Does not disable defer with null if argument", async () => { - const document = parse(` - query HeroNameQuery($shouldDefer: Boolean) { - hero { - id - ...NameFragment @defer(if: $shouldDefer) - } - } - fragment NameFragment on Hero { - name - } - `); - const result = await run(document); - expectJSON(result).toDeepEqual([ - { - data: { hero: { id: "1" } }, - pending: [{ id: "0", path: ["hero"] }], - hasNext: true, - }, - { - incremental: [ - { - data: { name: "Luke" }, - id: "0", - }, - ], - completed: [{ id: "0" }], - hasNext: false, - }, - ]); + + it.skip("Does not disable defer with null if argument", async () => { + // test is not interesting from a client perspective }); - it("Does not execute deferred fragments early when not specified", async () => { - const document = parse(` - query HeroNameQuery { - hero { - id - ...NameFragment @defer - } - } - fragment NameFragment on Hero { - name - } - `); - const order: Array = []; - const result = await run(document, { - hero: { - ...hero, - id: async () => { - await resolveOnNextTick(); - await resolveOnNextTick(); - order.push("slow-id"); - return hero.id; - }, - name: () => { - order.push("fast-name"); - return hero.name; - }, - }, - }); - expectJSON(result).toDeepEqual([ - { - data: { - hero: { - id: "1", - }, - }, - pending: [{ id: "0", path: ["hero"] }], - hasNext: true, - }, - { - incremental: [ - { - data: { - name: "Luke", - }, - id: "0", - }, - ], - completed: [{ id: "0" }], - hasNext: false, - }, - ]); - expect(order).to.deep.equal(["slow-id", "fast-name"]); + it.skip("Does not execute deferred fragments early when not specified", async () => { + // test is not interesting from a client perspective }); - it("Does execute deferred fragments early when specified", async () => { - const document = parse(` - query HeroNameQuery { - hero { - id - ...NameFragment @defer - } - } - fragment NameFragment on Hero { - name - } - `); - const order: Array = []; - const result = await run( - document, - { - hero: { - ...hero, - id: async () => { - await resolveOnNextTick(); - await resolveOnNextTick(); - order.push("slow-id"); - return hero.id; - }, - name: () => { - order.push("fast-name"); - return hero.name; - }, - }, - }, - true - ); - expectJSON(result).toDeepEqual([ - { - data: { - hero: { - id: "1", - }, - }, - pending: [{ id: "0", path: ["hero"] }], - hasNext: true, - }, - { - incremental: [ - { - data: { - name: "Luke", - }, - id: "0", - }, - ], - completed: [{ id: "0" }], - hasNext: false, - }, - ]); - 
expect(order).to.deep.equal(["fast-name", "slow-id"]); + it.skip("Does execute deferred fragments early when specified", async () => { + // test is not interesting from a client perspective }); + it("Can defer fragments on the top level Query field", async () => { - const document = parse(` + const query = gql` query HeroNameQuery { ...QueryFragment @defer(label: "DeferQuery") } @@ -381,33 +301,44 @@ describe("graphql-js test cases", () => { id } } - `); - const result = await run(document); + `; - expectJSON(result).toDeepEqual([ - { + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: {}, - pending: [{ id: "0", path: [], label: "DeferQuery" }], - hasNext: true, - }, - { - incremental: [ - { - data: { - hero: { - id: "1", - }, - }, - id: "0", + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + hero: { + id: "1", }, - ], - completed: [{ id: "0" }], - hasNext: false, - }, - ]); + }, + }); + expect(request.hasNext).toBe(false); + } }); + it("Can defer fragments with errors on the top level Query field", async () => { - const document = parse(` + const query = gql` query HeroNameQuery { ...QueryFragment @defer(label: "DeferQuery") } @@ -416,8 +347,11 @@ describe("graphql-js test cases", () => { name } } - `); - const result = await run(document, { + `; + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { hero: { ...hero, name: () => { @@ -426,37 +360,44 @@ describe("graphql-js test cases", () => { }, }); - expectJSON(result).toDeepEqual([ - { + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: {}, - pending: [{ id: "0", path: [], label: "DeferQuery" }], - hasNext: true, - }, - { - incremental: [ + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + hero: { + name: null, + }, + }, + errors: [ { - data: { - hero: { - name: null, - }, - }, - errors: [ - { - message: "bad", - locations: [{ line: 7, column: 11 }], - path: ["hero", "name"], - }, - ], - id: "0", + message: "bad", + locations: [{ line: 7, column: 11 }], + path: ["hero", "name"], }, ], - completed: [{ id: "0" }], - hasNext: false, - }, - ]); + }); + expect(request.hasNext).toBe(false); + } }); + it("Can defer a fragment within an already deferred fragment", async () => { - const document = parse(` + const query = gql` query HeroNameQuery { hero { ...TopFragment @defer(label: "DeferTop") @@ -471,83 +412,55 @@ describe("graphql-js test cases", () => { name } } - `); - const result = await run(document); + `; - 
expectJSON(result).toDeepEqual([ - { + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { hero: {}, }, - pending: [{ id: "0", path: ["hero"], label: "DeferTop" }], - hasNext: true, - }, - { - pending: [{ id: "1", path: ["hero"], label: "DeferNested" }], - incremental: [ - { - data: { - id: "1", - }, - id: "0", - }, - { - data: { - friends: [{ name: "Han" }, { name: "Leia" }, { name: "C-3PO" }], - }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + hero: { id: "1", + friends: [{ name: "Han" }, { name: "Leia" }, { name: "C-3PO" }], }, - ], - completed: [{ id: "0" }, { id: "1" }], - hasNext: false, - }, - ]); - }); - it("Can defer a fragment that is also not deferred, deferred fragment is first", async () => { - const document = parse(` - query HeroNameQuery { - hero { - ...TopFragment @defer(label: "DeferTop") - ...TopFragment - } - } - fragment TopFragment on Hero { - name - } - `); - const result = await run(document); - expectJSON(result).toDeepEqual({ - data: { - hero: { - name: "Luke", }, - }, - }); + }); + expect(request.hasNext).toBe(false); + } }); - it("Can defer a fragment that is also not deferred, non-deferred fragment is first", async () => { - const document = parse(` - query HeroNameQuery { - hero { - ...TopFragment - ...TopFragment @defer(label: "DeferTop") - } - } - fragment TopFragment on Hero { - name - } - `); - const result = await run(document); - expectJSON(result).toDeepEqual({ - data: { - hero: { - name: "Luke", - }, - }, - }); + + it.skip("Can defer a fragment that is also not deferred, deferred fragment is first", async () => { + // from the client perspective, a regular graphql query + }); + + it.skip("Can defer a fragment that is also not deferred, non-deferred fragment is first", async () => { + // from the client perspective, a regular graphql query }); it("Can defer an inline fragment", async () => { - const document = parse(` + const query = gql` query HeroNameQuery { hero { id @@ -556,46 +469,52 @@ describe("graphql-js test cases", () => { } } } - `); - const result = await run(document); + `; + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); - expectJSON(result).toDeepEqual([ - { - data: { hero: { id: "1" } }, - pending: [{ id: "0", path: ["hero"], label: "InlineDeferred" }], - hasNext: true, - }, - { - incremental: [{ data: { name: "Luke" }, id: "0" }], - completed: [{ id: "0" }], - hasNext: false, - }, - ]); + const incoming = run(query); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + hero: { + id: "1", + }, + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + 
expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + hero: { + id: "1", + name: "Luke", + }, + }, + }); + expect(request.hasNext).toBe(false); + } }); - it("Does not emit empty defer fragments", async () => { - const document = parse(` - query HeroNameQuery { - hero { - ... @defer { - name @skip(if: true) - } - } - } - fragment TopFragment on Hero { - name - } - `); - const result = await run(document); - expectJSON(result).toDeepEqual({ - data: { - hero: {}, - }, - }); + it.skip("Does not emit empty defer fragments", async () => { + // from the client perspective, a regular query }); it("Emits children of empty defer fragments", async () => { - const document = parse(` + const query = gql` query HeroNameQuery { hero { ... @defer { @@ -605,26 +524,46 @@ describe("graphql-js test cases", () => { } } } - `); - const result = await run(document); - expectJSON(result).toDeepEqual([ - { + `; + + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { hero: {}, }, - pending: [{ id: "0", path: ["hero"] }], - hasNext: true, - }, - { - incremental: [{ data: { name: "Luke" }, id: "0" }], - completed: [{ id: "0" }], - hasNext: false, - }, - ]); - }); + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + hero: { + name: "Luke", + }, + }, + }); + expect(request.hasNext).toBe(false); + } + }); it("Can separately emit defer fragments with different labels with varying fields", async () => { - const document = parse(` + const query = gql` query HeroNameQuery { hero { ... 
@defer(label: "DeferID") { @@ -635,42 +574,47 @@ describe("graphql-js test cases", () => { } } } - `); - const result = await run(document); - expectJSON(result).toDeepEqual([ - { + `; + + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { hero: {}, }, - pending: [ - { id: "0", path: ["hero"], label: "DeferID" }, - { id: "1", path: ["hero"], label: "DeferName" }, - ], - hasNext: true, - }, - { - incremental: [ - { - data: { - id: "1", - }, - id: "0", - }, - { - data: { - name: "Luke", - }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + hero: { id: "1", + name: "Luke", }, - ], - completed: [{ id: "0" }, { id: "1" }], - hasNext: false, - }, - ]); + }, + }); + expect(request.hasNext).toBe(false); + } }); it("Separately emits defer fragments with different labels with varying subfields", async () => { - const document = parse(` + const query = gql` query HeroNameQuery { ... @defer(label: "DeferID") { hero { @@ -683,95 +627,49 @@ describe("graphql-js test cases", () => { } } } - `); - const result = await run(document); - expectJSON(result).toDeepEqual([ - { + `; + + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: {}, - pending: [ - { id: "0", path: [], label: "DeferID" }, - { id: "1", path: [], label: "DeferName" }, - ], - hasNext: true, - }, - { - incremental: [ - { - data: { hero: {} }, - id: "0", - }, - { - data: { id: "1" }, - id: "0", - subPath: ["hero"], - }, - { - data: { name: "Luke" }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + hero: { id: "1", - subPath: ["hero"], + name: "Luke", }, - ], - completed: [{ id: "0" }, { id: "1" }], - hasNext: false, - }, - ]); + }, + }); + expect(request.hasNext).toBe(false); + } }); - it("Separately emits defer fragments with different labels with varying subfields that return promises", async () => { - const document = parse(` - query HeroNameQuery { - ... @defer(label: "DeferID") { - hero { - id - } - } - ... 
@defer(label: "DeferName") { - hero { - name - } - } - } - `); - const result = await run(document, { - hero: { - id: () => Promise.resolve("1"), - name: () => Promise.resolve("Luke"), - }, - }); - expectJSON(result).toDeepEqual([ - { - data: {}, - pending: [ - { id: "0", path: [], label: "DeferID" }, - { id: "1", path: [], label: "DeferName" }, - ], - hasNext: true, - }, - { - incremental: [ - { - data: { hero: {} }, - id: "0", - }, - { - data: { id: "1" }, - id: "0", - subPath: ["hero"], - }, - { - data: { name: "Luke" }, - id: "1", - subPath: ["hero"], - }, - ], - completed: [{ id: "0" }, { id: "1" }], - hasNext: false, - }, - ]); + it.skip("Separately emits defer fragments with different labels with varying subfields that return promises", async () => { + // from the client perspective, a repeat of the last one }); it("Separately emits defer fragments with varying subfields of same priorities but different level of defers", async () => { - const document = parse(` + const query = gql` query HeroNameQuery { hero { ... @defer(label: "DeferID") { @@ -784,43 +682,47 @@ describe("graphql-js test cases", () => { } } } - `); - const result = await run(document); - expectJSON(result).toDeepEqual([ - { + `; + + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { hero: {}, }, - pending: [ - { id: "0", path: ["hero"], label: "DeferID" }, - { id: "1", path: [], label: "DeferName" }, - ], - hasNext: true, - }, - { - incremental: [ - { - data: { - id: "1", - }, - id: "0", - }, - { - data: { - name: "Luke", - }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + hero: { id: "1", - subPath: ["hero"], + name: "Luke", }, - ], - completed: [{ id: "0" }, { id: "1" }], - hasNext: false, - }, - ]); + }, + }); + expect(request.hasNext).toBe(false); + } }); it("Separately emits nested defer fragments with varying subfields of same priorities but different level of defers", async () => { - const document = parse(` + const query = gql` query HeroNameQuery { ... 
@defer(label: "DeferName") { hero { @@ -831,40 +733,44 @@ describe("graphql-js test cases", () => { } } } - `); - const result = await run(document); - expectJSON(result).toDeepEqual([ - { + `; + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: {}, - pending: [{ id: "0", path: [], label: "DeferName" }], - hasNext: true, - }, - { - pending: [{ id: "1", path: ["hero"], label: "DeferID" }], - incremental: [ - { - data: { - hero: { - name: "Luke", - }, - }, - id: "0", - }, - { - data: { - id: "1", - }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + hero: { id: "1", + name: "Luke", }, - ], - completed: [{ id: "0" }, { id: "1" }], - hasNext: false, - }, - ]); + }, + }); + expect(request.hasNext).toBe(false); + } }); it("Initiates deferred grouped field sets only if they have been released as pending", async () => { - const document = parse(` + const query = gql` query { ... @defer { a { @@ -886,113 +792,82 @@ describe("graphql-js test cases", () => { } } } - `); + `; const { promise: slowFieldPromise, resolve: resolveSlowField } = promiseWithResolvers(); - let cResolverCalled = false; - let eResolverCalled = false; - const executeResult = experimentalExecuteIncrementally({ - schema, - document, - rootValue: { - a: { - someField: slowFieldPromise, - b: { - c: () => { - cResolverCalled = true; - return { d: "d" }; - }, - e: () => { - eResolverCalled = true; - return { f: "f" }; - }, + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + a: { + someField: slowFieldPromise, + b: { + c: () => { + return { d: "d" }; + }, + e: () => { + return { f: "f" }; }, }, }, - enableEarlyExecution: false, }); - assert("initialResult" in executeResult); - - const result1 = executeResult.initialResult; - expectJSON(result1).toDeepEqual({ - data: {}, - pending: [ - { id: "0", path: [] }, - { id: "1", path: [] }, - ], - hasNext: true, - }); + { + const { value: chunk, done } = await incoming.next(); - const iterator = executeResult.subsequentResults[Symbol.asyncIterator](); + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: {}, + }); + expect(request.hasNext).toBe(true); + } - expect(cResolverCalled).to.equal(false); - expect(eResolverCalled).to.equal(false); + { + const { value: chunk, done } = await incoming.next(); - const result2 = await iterator.next(); - expectJSON(result2).toDeepEqual({ - value: { - pending: [{ id: "2", path: ["a"] }], - incremental: [ - { - data: { a: {} }, - id: "0", - }, - { - data: { b: {} }, - id: "2", - }, - { - data: { c: { d: "d" } }, - id: "2", - subPath: ["b"], + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + a: { + b: { 
+ c: { d: "d" }, + }, }, - ], - completed: [{ id: "0" }, { id: "2" }], - hasNext: true, - }, - done: false, - }); - - expect(cResolverCalled).to.equal(true); - expect(eResolverCalled).to.equal(false); + }, + }); + expect(request.hasNext).toBe(true); + } resolveSlowField("someField"); - const result3 = await iterator.next(); - expectJSON(result3).toDeepEqual({ - value: { - pending: [{ id: "3", path: ["a"] }], - incremental: [ - { - data: { someField: "someField" }, - id: "1", - subPath: ["a"], - }, - { - data: { e: { f: "f" } }, - id: "3", - subPath: ["b"], - }, - ], - completed: [{ id: "1" }, { id: "3" }], - hasNext: false, - }, - done: false, - }); - - expect(eResolverCalled).to.equal(true); + { + const { value: chunk, done } = await incoming.next(); - const result4 = await iterator.next(); - expectJSON(result4).toDeepEqual({ - value: undefined, - done: true, - }); + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + a: { + b: { + c: { d: "d" }, + e: { f: "f" }, + }, + someField: "someField", + }, + }, + }); + expect(request.hasNext).toBe(false); + } }); it("Initiates unique deferred grouped field sets after those that are common to sibling defers", async () => { - const document = parse(` + const query = gql` query { ... @defer { a { @@ -1014,103 +889,77 @@ describe("graphql-js test cases", () => { } } } - `); + `; const { promise: cPromise, resolve: resolveC } = promiseWithResolvers(); - let cResolverCalled = false; - let eResolverCalled = false; - const executeResult = experimentalExecuteIncrementally({ - schema, - document, - rootValue: { - a: { - b: { - c: async () => { - cResolverCalled = true; - await cPromise; - return { d: "d" }; - }, - e: () => { - eResolverCalled = true; - return { f: "f" }; - }, + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + a: { + b: { + c: async () => { + await cPromise; + return { d: "d" }; + }, + e: () => { + return { f: "f" }; }, }, }, - enableEarlyExecution: false, }); - assert("initialResult" in executeResult); - - const result1 = executeResult.initialResult; - expectJSON(result1).toDeepEqual({ - data: {}, - pending: [ - { id: "0", path: [] }, - { id: "1", path: [] }, - ], - hasNext: true, - }); + { + const { value: chunk, done } = await incoming.next(); - const iterator = executeResult.subsequentResults[Symbol.asyncIterator](); + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: {}, + }); + expect(request.hasNext).toBe(true); + } - expect(cResolverCalled).to.equal(false); - expect(eResolverCalled).to.equal(false); + { + const { value: chunk, done } = await incoming.next(); - const result2 = await iterator.next(); - expectJSON(result2).toDeepEqual({ - value: { - pending: [ - { id: "2", path: ["a"] }, - { id: "3", path: ["a"] }, - ], - incremental: [ - { - data: { a: {} }, - id: "0", - }, - ], - completed: [{ id: "0" }, { id: "1" }], - hasNext: true, - }, - done: false, - }); + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + a: {}, + }, + }); + expect(request.hasNext).toBe(true); + } resolveC(); - 
expect(cResolverCalled).to.equal(true); - expect(eResolverCalled).to.equal(false); + { + const { value: chunk, done } = await incoming.next(); - const result3 = await iterator.next(); - expectJSON(result3).toDeepEqual({ - value: { - incremental: [ - { - data: { b: { c: { d: "d" } } }, - id: "2", - }, - { - data: { e: { f: "f" } }, - id: "3", - subPath: ["b"], + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + a: { + b: { + c: { d: "d" }, + e: { f: "f" }, + }, }, - ], - completed: [{ id: "2" }, { id: "3" }], - hasNext: false, - }, - done: false, - }); - - const result4 = await iterator.next(); - expectJSON(result4).toDeepEqual({ - value: undefined, - done: true, - }); + }, + }); + expect(request.hasNext).toBe(false); + } }); it("Can deduplicate multiple defers on the same object", async () => { - const document = parse(` + const query = gql` query { hero { friends { @@ -1134,33 +983,52 @@ describe("graphql-js test cases", () => { id name } - `); - const result = await run(document); + `; - expectJSON(result).toDeepEqual([ - { - data: { hero: { friends: [{}, {}, {}] } }, - pending: [ - { id: "0", path: ["hero", "friends", 0] }, - { id: "1", path: ["hero", "friends", 1] }, - { id: "2", path: ["hero", "friends", 2] }, - ], - hasNext: true, - }, - { - incremental: [ - { data: { id: "2", name: "Han" }, id: "0" }, - { data: { id: "3", name: "Leia" }, id: "1" }, - { data: { id: "4", name: "C-3PO" }, id: "2" }, - ], - completed: [{ id: "0" }, { id: "1" }, { id: "2" }], - hasNext: false, - }, - ]); + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + hero: { + friends: [{}, {}, {}], + }, + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + hero: { + friends: [ + { id: "2", name: "Han" }, + { id: "3", name: "Leia" }, + { id: "4", name: "C-3PO" }, + ], + }, + }, + }); + expect(request.hasNext).toBe(false); + } }); it("Deduplicates fields present in the initial payload", async () => { - const document = parse(` + const query = gql` query { hero { nestedObject { @@ -1187,15 +1055,24 @@ describe("graphql-js test cases", () => { } } } - `); - const result = await run(document, { + `; + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { hero: { nestedObject: { deeperObject: { foo: "foo", bar: "bar" } }, anotherNestedObject: { deeperObject: { foo: "foo" } }, }, }); - expectJSON(result).toDeepEqual([ - { + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { hero: { nestedObject: { @@ -1210,25 +1087,39 @@ describe("graphql-js test cases", () => { }, }, }, - pending: [{ id: "0", path: 
["hero"] }], - hasNext: true, - }, - { - incremental: [ - { - data: { bar: "bar" }, - id: "0", - subPath: ["nestedObject", "deeperObject"], - }, - ], - completed: [{ id: "0" }], - hasNext: false, - }, - ]); + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + hero: { + nestedObject: { + deeperObject: { + foo: "foo", + bar: "bar", + }, + }, + anotherNestedObject: { + deeperObject: { + foo: "foo", + }, + }, + }, + }, + }); + expect(request.hasNext).toBe(false); + } }); it("Deduplicates fields present in a parent defer payload", async () => { - const document = parse(` + const query = gql` query { hero { ... @defer { @@ -1244,44 +1135,52 @@ describe("graphql-js test cases", () => { } } } - `); - const result = await run(document, { + `; + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { hero: { nestedObject: { deeperObject: { foo: "foo", bar: "bar" } } }, }); - expectJSON(result).toDeepEqual([ - { + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { hero: {}, }, - pending: [{ id: "0", path: ["hero"] }], - hasNext: true, - }, - { - pending: [{ id: "1", path: ["hero", "nestedObject", "deeperObject"] }], - incremental: [ - { - data: { - nestedObject: { - deeperObject: { foo: "foo" }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + hero: { + nestedObject: { + deeperObject: { + foo: "foo", + bar: "bar", }, }, - id: "0", - }, - { - data: { - bar: "bar", - }, - id: "1", }, - ], - completed: [{ id: "0" }, { id: "1" }], - hasNext: false, - }, - ]); + }, + }); + expect(request.hasNext).toBe(false); + } }); it("Deduplicates fields with deferred fragments at multiple levels", async () => { - const document = parse(` + const query = gql` query { hero { nestedObject { @@ -1312,16 +1211,26 @@ describe("graphql-js test cases", () => { } } } - `); - const result = await run(document, { + `; + + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { hero: { nestedObject: { deeperObject: { foo: "foo", bar: "bar", baz: "baz", bak: "bak" }, }, }, }); - expectJSON(result).toDeepEqual([ - { + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { hero: { nestedObject: { @@ -1331,38 +1240,36 @@ describe("graphql-js test cases", () => { }, }, }, - pending: [{ id: "0", path: ["hero"] }], - hasNext: true, - }, - { - pending: [ - { id: "1", path: ["hero", "nestedObject"] }, - { id: "2", path: ["hero", "nestedObject", "deeperObject"] }, - ], - incremental: [ - { - data: { bar: "bar" }, - id: "0", - subPath: ["nestedObject", "deeperObject"], - }, - { 
- data: { baz: "baz" }, - id: "1", - subPath: ["deeperObject"], - }, - { - data: { bak: "bak" }, - id: "2", + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + hero: { + nestedObject: { + deeperObject: { + foo: "foo", + bar: "bar", + baz: "baz", + bak: "bak", + }, + }, }, - ], - completed: [{ id: "0" }, { id: "1" }, { id: "2" }], - hasNext: false, - }, - ]); + }, + }); + expect(request.hasNext).toBe(false); + } }); it("Deduplicates multiple fields from deferred fragments from different branches occurring at the same level", async () => { - const document = parse(` + const query = gql` query { hero { nestedObject { @@ -1384,12 +1291,22 @@ describe("graphql-js test cases", () => { } } } - `); - const result = await run(document, { + `; + + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { hero: { nestedObject: { deeperObject: { foo: "foo", bar: "bar" } } }, }); - expectJSON(result).toDeepEqual([ - { + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { hero: { nestedObject: { @@ -1397,35 +1314,34 @@ describe("graphql-js test cases", () => { }, }, }, - pending: [ - { id: "0", path: ["hero", "nestedObject", "deeperObject"] }, - { id: "1", path: ["hero", "nestedObject", "deeperObject"] }, - ], - hasNext: true, - }, - { - incremental: [ - { - data: { - foo: "foo", - }, - id: "0", - }, - { - data: { - bar: "bar", + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + hero: { + nestedObject: { + deeperObject: { + foo: "foo", + bar: "bar", + }, }, - id: "1", }, - ], - completed: [{ id: "0" }, { id: "1" }], - hasNext: false, - }, - ]); + }, + }); + expect(request.hasNext).toBe(false); + } }); it("Deduplicate fields with deferred fragments in different branches at multiple non-overlapping levels", async () => { - const document = parse(` + const query = gql` query { a { b { @@ -1452,8 +1368,12 @@ describe("graphql-js test cases", () => { } } } - `); - const result = await run(document, { + `; + + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { a: { b: { c: { d: "d" }, @@ -1462,42 +1382,50 @@ describe("graphql-js test cases", () => { }, g: { h: "h" }, }); - expectJSON(result).toDeepEqual([ - { + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { a: { b: { - c: { - d: "d", - }, + c: { d: "d" }, }, }, }, - pending: [ - { id: "0", path: ["a", "b"] }, - { id: "1", path: [] }, - ], - hasNext: true, - }, - { - incremental: [ - { - data: { e: { f: "f" } }, - id: "0", + }); + expect(request.hasNext).toBe(true); + } + + { + const { 
value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + a: { + b: { + c: { d: "d" }, + e: { f: "f" }, + }, }, - { - data: { g: { h: "h" } }, - id: "1", + g: { + h: "h", }, - ], - completed: [{ id: "0" }, { id: "1" }], - hasNext: false, - }, - ]); + }, + }); + expect(request.hasNext).toBe(false); + } }); it("Correctly bundles varying subfields into incremental data records unique by defer combination, ignoring fields in a fragment masked by a parent defer", async () => { - const document = parse(` + const query = gql` query HeroNameQuery { ... @defer { hero { @@ -1514,45 +1442,45 @@ describe("graphql-js test cases", () => { } } } - `); - const result = await run(document); - expectJSON(result).toDeepEqual([ - { + `; + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: {}, - pending: [ - { id: "0", path: [] }, - { id: "1", path: [] }, - ], - hasNext: true, - }, - { - incremental: [ - { - data: { hero: {} }, - id: "0", - }, - { - data: { id: "1" }, - id: "0", - subPath: ["hero"], - }, - { - data: { - name: "Luke", - shouldBeWithNameDespiteAdditionalDefer: "Luke", - }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + hero: { id: "1", - subPath: ["hero"], + name: "Luke", + shouldBeWithNameDespiteAdditionalDefer: "Luke", }, - ], - completed: [{ id: "0" }, { id: "1" }], - hasNext: false, - }, - ]); + }, + }); + expect(request.hasNext).toBe(false); + } }); it("Nulls cross defer boundaries, null first", async () => { - const document = parse(` + const query = gql` query { ... 
@defer { a { @@ -1574,54 +1502,57 @@ describe("graphql-js test cases", () => { } } } - `); - const result = await run(document, { + `; + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { a: { b: { c: { d: "d" } }, someField: "someField" }, }); - expectJSON(result).toDeepEqual([ - { + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { a: {}, }, - pending: [ - { id: "0", path: [] }, - { id: "1", path: ["a"] }, - ], - hasNext: true, - }, - { - incremental: [ - { - data: { b: { c: {} } }, - id: "1", - }, - { - data: { d: "d" }, - id: "1", - subPath: ["b", "c"], + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + a: { + b: { + c: { d: "d" }, + }, }, - ], - completed: [ + }, + errors: [ { - id: "0", - errors: [ - { - message: - "Cannot return null for non-nullable field c.nonNullErrorField.", - locations: [{ line: 8, column: 17 }], - path: ["a", "b", "c", "nonNullErrorField"], - }, - ], + message: + "Cannot return null for non-nullable field c.nonNullErrorField.", + locations: [{ line: 8, column: 17 }], + path: ["a", "b", "c", "nonNullErrorField"], }, - { id: "1" }, ], - hasNext: false, - }, - ]); + }); + expect(request.hasNext).toBe(false); + } }); it("Nulls cross defer boundaries, value first", async () => { - const document = parse(` + const query = gql` query { ... 
@defer { a { @@ -1643,57 +1574,60 @@ describe("graphql-js test cases", () => { } } } - `); - const result = await run(document, { + `; + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { a: { b: { c: { d: "d" }, nonNullErrorFIeld: null }, someField: "someField", }, }); - expectJSON(result).toDeepEqual([ - { + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { a: {}, }, - pending: [ - { id: "0", path: [] }, - { id: "1", path: ["a"] }, - ], - hasNext: true, - }, - { - incremental: [ - { - data: { b: { c: {} } }, - id: "1", - }, - { - data: { d: "d" }, - id: "0", - subPath: ["a", "b", "c"], + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + a: { + b: { + c: { d: "d" }, + }, }, - ], - completed: [ - { id: "0" }, + }, + errors: [ { - id: "1", - errors: [ - { - message: - "Cannot return null for non-nullable field c.nonNullErrorField.", - locations: [{ line: 17, column: 17 }], - path: ["a", "b", "c", "nonNullErrorField"], - }, - ], + message: + "Cannot return null for non-nullable field c.nonNullErrorField.", + locations: [{ line: 17, column: 17 }], + path: ["a", "b", "c", "nonNullErrorField"], }, ], - hasNext: false, - }, - ]); + }); + expect(request.hasNext).toBe(false); + } }); it("Handles multiple erroring deferred grouped field sets", async () => { - const document = parse(` + const query = gql` query { ... 
@defer { a { @@ -1714,53 +1648,57 @@ describe("graphql-js test cases", () => { } } } - `); - const result = await run(document, { + `; + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { a: { b: { c: { nonNullErrorField: null } }, }, }); - expectJSON(result).toDeepEqual([ - { + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: {}, - pending: [ - { id: "0", path: [] }, - { id: "1", path: [] }, - ], - hasNext: true, - }, - { - completed: [ + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: {}, + errors: [ { - id: "0", - errors: [ - { - message: - "Cannot return null for non-nullable field c.nonNullErrorField.", - locations: [{ line: 7, column: 17 }], - path: ["a", "b", "c", "someError"], - }, - ], + message: + "Cannot return null for non-nullable field c.nonNullErrorField.", + locations: [{ line: 7, column: 17 }], + path: ["a", "b", "c", "someError"], }, { - id: "1", - errors: [ - { - message: - "Cannot return null for non-nullable field c.nonNullErrorField.", - locations: [{ line: 16, column: 17 }], - path: ["a", "b", "c", "anotherError"], - }, - ], + message: + "Cannot return null for non-nullable field c.nonNullErrorField.", + locations: [{ line: 16, column: 17 }], + path: ["a", "b", "c", "anotherError"], }, ], - hasNext: false, - }, - ]); + }); + expect(request.hasNext).toBe(false); + } }); it("Handles multiple erroring deferred grouped field sets for the same fragment", async () => { - const document = parse(` + const query = gql` query { ... 
@defer { a { @@ -1787,59 +1725,58 @@ describe("graphql-js test cases", () => { } } } - `); - const result = await run(document, { + `; + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { a: { b: { c: { d: "d", nonNullErrorField: null } }, }, }); - expectJSON(result).toDeepEqual([ - { + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: {}, - pending: [ - { id: "0", path: [] }, - { id: "1", path: [] }, - ], - hasNext: true, - }, - { - incremental: [ - { - data: { a: { b: { someC: {}, anotherC: {} } } }, - id: "0", - }, - { - data: { d: "d" }, - id: "0", - subPath: ["a", "b", "someC"], - }, - { - data: { d: "d" }, - id: "0", - subPath: ["a", "b", "anotherC"], + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + a: { + b: { + someC: { d: "d" }, + anotherC: { d: "d" }, + }, }, - ], - completed: [ + }, + errors: [ { - id: "1", - errors: [ - { - message: - "Cannot return null for non-nullable field c.nonNullErrorField.", - locations: [{ line: 19, column: 17 }], - path: ["a", "b", "someC", "someError"], - }, - ], + message: + "Cannot return null for non-nullable field c.nonNullErrorField.", + locations: [{ line: 19, column: 17 }], + path: ["a", "b", "someC", "someError"], }, - { id: "0" }, ], - hasNext: false, - }, - ]); + }); + expect(request.hasNext).toBe(false); + } }); it("filters a payload with a null that cannot be merged", async () => { - const document = parse(` + const query = gql` query { ... 
@defer { a { @@ -1861,9 +1798,12 @@ describe("graphql-js test cases", () => { } } } - `); - const result = await run( - document, + `; + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run( + query, { a: { b: { @@ -1880,91 +1820,54 @@ describe("graphql-js test cases", () => { }, true ); - expectJSON(result).toDeepEqual([ - { + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { a: {}, }, - pending: [ - { id: "0", path: [] }, - { id: "1", path: ["a"] }, - ], - hasNext: true, - }, - { - incremental: [ - { - data: { b: { c: {} } }, - id: "1", - }, - { - data: { d: "d" }, - id: "1", - subPath: ["b", "c"], + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + a: { + b: { + c: { d: "d" }, + }, }, - ], - completed: [{ id: "1" }], - hasNext: true, - }, - { - completed: [ + }, + errors: [ { - id: "0", - errors: [ - { - message: - "Cannot return null for non-nullable field c.nonNullErrorField.", - locations: [{ line: 8, column: 17 }], - path: ["a", "b", "c", "nonNullErrorField"], - }, - ], + message: + "Cannot return null for non-nullable field c.nonNullErrorField.", + locations: [{ line: 8, column: 17 }], + path: ["a", "b", "c", "nonNullErrorField"], }, ], - hasNext: false, - }, - ]); + }); + expect(request.hasNext).toBe(false); + } }); - it("Cancels deferred fields when initial result exhibits null bubbling", async () => { - const document = parse(` - query { - hero { - nonNullName - } - ... @defer { - hero { - name - } - } - } - `); - const result = await run( - document, - { - hero: { - ...hero, - nonNullName: () => null, - }, - }, - true - ); - expectJSON(result).toDeepEqual({ - data: { - hero: null, - }, - errors: [ - { - message: - "Cannot return null for non-nullable field Hero.nonNullName.", - locations: [{ line: 4, column: 11 }], - path: ["hero", "nonNullName"], - }, - ], - }); + it.skip("Cancels deferred fields when initial result exhibits null bubbling", async () => { + // from the client perspective, a regular graphql query }); it("Cancels deferred fields when deferred result exhibits null bubbling", async () => { - const document = parse(` + const query = gql` query { ... 
@defer { hero { @@ -1973,9 +1876,13 @@ describe("graphql-js test cases", () => { } } } - `); - const result = await run( - document, + `; + + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run( + query, { hero: { ...hero, @@ -1984,119 +1891,56 @@ describe("graphql-js test cases", () => { }, true ); - expectJSON(result).toDeepEqual([ - { + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: {}, - pending: [{ id: "0", path: [] }], - hasNext: true, - }, - { - incremental: [ + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + hero: null, + }, + errors: [ { - data: { - hero: null, - }, - errors: [ - { - message: - "Cannot return null for non-nullable field Hero.nonNullName.", - locations: [{ line: 5, column: 13 }], - path: ["hero", "nonNullName"], - }, - ], - id: "0", + message: + "Cannot return null for non-nullable field Hero.nonNullName.", + locations: [{ line: 5, column: 13 }], + path: ["hero", "nonNullName"], }, ], - completed: [{ id: "0" }], - hasNext: false, - }, - ]); + }); + expect(request.hasNext).toBe(false); + } }); - it("Deduplicates list fields", async () => { - const document = parse(` - query { - hero { - friends { - name - } - ... @defer { - friends { - name - } - } - } - } - `); - const result = await run(document); - expectJSON(result).toDeepEqual({ - data: { - hero: { - friends: [{ name: "Han" }, { name: "Leia" }, { name: "C-3PO" }], - }, - }, - }); + it.skip("Deduplicates list fields", async () => { + // from the client perspective, a regular query }); - it("Deduplicates async iterable list fields", async () => { - const document = parse(` - query { - hero { - friends { - name - } - ... @defer { - friends { - name - } - } - } - } - `); - const result = await run(document, { - hero: { - ...hero, - friends: async function* resolve() { - yield await Promise.resolve(friends[0]); - }, - }, - }); - expectJSON(result).toDeepEqual({ - data: { hero: { friends: [{ name: "Han" }] } }, - }); + it.skip("Deduplicates async iterable list fields", async () => { + // from the client perspective, a regular query }); - it("Deduplicates empty async iterable list fields", async () => { - const document = parse(` - query { - hero { - friends { - name - } - ... 
@defer { - friends { - name - } - } - } - } - `); - const result = await run(document, { - hero: { - ...hero, - - friends: async function* resolve() { - await resolveOnNextTick(); - }, - }, - }); - expectJSON(result).toDeepEqual({ - data: { hero: { friends: [] } }, - }); + it.skip("Deduplicates empty async iterable list fields", async () => { + // from the client perspective, a regular query }); it("Does not deduplicate list fields with non-overlapping fields", async () => { - const document = parse(` + const query = gql` query { hero { friends { @@ -2109,121 +1953,63 @@ describe("graphql-js test cases", () => { } } } - `); - const result = await run(document); - expectJSON(result).toDeepEqual([ - { + `; + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { hero: { friends: [{ name: "Han" }, { name: "Leia" }, { name: "C-3PO" }], }, }, - pending: [{ id: "0", path: ["hero"] }], - hasNext: true, - }, - { - incremental: [ - { - data: { id: "2" }, - id: "0", - subPath: ["friends", 0], - }, - { - data: { id: "3" }, - id: "0", - subPath: ["friends", 1], - }, - { - data: { id: "4" }, - id: "0", - subPath: ["friends", 2], + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + hero: { + friends: [ + { id: "2", name: "Han" }, + { id: "3", name: "Leia" }, + { id: "4", name: "C-3PO" }, + ], }, - ], - completed: [{ id: "0" }], - hasNext: false, - }, - ]); + }, + }); + expect(request.hasNext).toBe(false); + } }); - it("Deduplicates list fields that return empty lists", async () => { - const document = parse(` - query { - hero { - friends { - name - } - ... @defer { - friends { - name - } - } - } - } - `); - const result = await run(document, { - hero: { - ...hero, - friends: () => [], - }, - }); - expectJSON(result).toDeepEqual({ - data: { hero: { friends: [] } }, - }); + it.skip("Deduplicates list fields that return empty lists", async () => { + // from the client perspective, a regular query }); - it("Deduplicates null object fields", async () => { - const document = parse(` - query { - hero { - nestedObject { - name - } - ... @defer { - nestedObject { - name - } - } - } - } - `); - const result = await run(document, { - hero: { - ...hero, - nestedObject: () => null, - }, - }); - expectJSON(result).toDeepEqual({ - data: { hero: { nestedObject: null } }, - }); + it.skip("Deduplicates null object fields", async () => { + // from the client perspective, a regular query }); - it("Deduplicates promise object fields", async () => { - const document = parse(` - query { - hero { - nestedObject { - name - } - ... 
@defer { - nestedObject { - name - } - } - } - } - `); - const result = await run(document, { - hero: { - nestedObject: () => Promise.resolve({ name: "foo" }), - }, - }); - expectJSON(result).toDeepEqual({ - data: { hero: { nestedObject: { name: "foo" } } }, - }); + it.skip("Deduplicates promise object fields", async () => { + // from the client perspective, a regular query }); it("Handles errors thrown in deferred fragments", async () => { - const document = parse(` + const query = gql` query HeroNameQuery { hero { id @@ -2233,8 +2019,12 @@ describe("graphql-js test cases", () => { fragment NameFragment on Hero { name } - `); - const result = await run(document, { + `; + + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { hero: { ...hero, name: () => { @@ -2242,33 +2032,50 @@ describe("graphql-js test cases", () => { }, }, }); - expectJSON(result).toDeepEqual([ - { - data: { hero: { id: "1" } }, - pending: [{ id: "0", path: ["hero"] }], - hasNext: true, - }, - { - incremental: [ + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + hero: { + id: "1", + }, + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + hero: { + id: "1", + name: null, + }, + }, + errors: [ { - data: { name: null }, - id: "0", - errors: [ - { - message: "bad", - locations: [{ line: 9, column: 9 }], - path: ["hero", "name"], - }, - ], + message: "bad", + locations: [{ line: 9, column: 9 }], + path: ["hero", "name"], }, ], - completed: [{ id: "0" }], - hasNext: false, - }, - ]); + }); + expect(request.hasNext).toBe(false); + } }); + it("Handles non-nullable errors thrown in deferred fragments", async () => { - const document = parse(` + const query = gql` query HeroNameQuery { hero { id @@ -2278,76 +2085,64 @@ describe("graphql-js test cases", () => { fragment NameFragment on Hero { nonNullName } - `); - const result = await run(document, { + `; + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { hero: { ...hero, nonNullName: () => null, }, }); - expectJSON(result).toDeepEqual([ - { - data: { hero: { id: "1" } }, - pending: [{ id: "0", path: ["hero"] }], - hasNext: true, - }, - { - completed: [ + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + hero: { + name: "Luke", + }, + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + hero: { + id: "1", + }, + }, + errors: [ { - id: "0", - errors: [ - { - message: - "Cannot return null for non-nullable field Hero.nonNullName.", - locations: [{ line: 9, column: 9 }], - path: ["hero", 
"nonNullName"], - }, - ], + message: + "Cannot return null for non-nullable field Hero.nonNullName.", + locations: [{ line: 9, column: 9 }], + path: ["hero", "nonNullName"], }, ], - hasNext: false, - }, - ]); + }); + expect(request.hasNext).toBe(false); + } }); - it("Handles non-nullable errors thrown outside deferred fragments", async () => { - const document = parse(` - query HeroNameQuery { - hero { - nonNullName - ...NameFragment @defer - } - } - fragment NameFragment on Hero { - id - } - `); - const result = await run(document, { - hero: { - ...hero, - nonNullName: () => null, - }, - }); - expectJSON(result).toDeepEqual({ - errors: [ - { - message: - "Cannot return null for non-nullable field Hero.nonNullName.", - locations: [ - { - line: 4, - column: 11, - }, - ], - path: ["hero", "nonNullName"], - }, - ], - data: { - hero: null, - }, - }); + + it.skip("Handles non-nullable errors thrown outside deferred fragments", async () => { + // from the client perspective, a regular query }); + it("Handles async non-nullable errors thrown in deferred fragments", async () => { - const document = parse(` + const query = gql` query HeroNameQuery { hero { id @@ -2357,39 +2152,60 @@ describe("graphql-js test cases", () => { fragment NameFragment on Hero { nonNullName } - `); - const result = await run(document, { + `; + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { hero: { ...hero, nonNullName: () => Promise.resolve(null), }, }); - expectJSON(result).toDeepEqual([ - { - data: { hero: { id: "1" } }, - pending: [{ id: "0", path: ["hero"] }], - hasNext: true, - }, - { - completed: [ + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + hero: { + id: "1", + }, + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + hero: { + id: "1", + }, + }, + errors: [ { - id: "0", - errors: [ - { - message: - "Cannot return null for non-nullable field Hero.nonNullName.", - locations: [{ line: 9, column: 9 }], - path: ["hero", "nonNullName"], - }, - ], + message: + "Cannot return null for non-nullable field Hero.nonNullName.", + locations: [{ line: 9, column: 9 }], + path: ["hero", "nonNullName"], }, ], - hasNext: false, - }, - ]); + }); + expect(request.hasNext).toBe(false); + } }); + it("Returns payloads in correct order", async () => { - const document = parse(` + const query = gql` query HeroNameQuery { hero { id @@ -2405,8 +2221,11 @@ describe("graphql-js test cases", () => { fragment NestedFragment on Friend { name } - `); - const result = await run(document, { + `; + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { hero: { ...hero, name: async () => { @@ -2415,167 +2234,55 @@ describe("graphql-js test cases", () => { }, }, }); - expectJSON(result).toDeepEqual([ - { + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(false); + 
expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { - hero: { id: "1" }, - }, - pending: [{ id: "0", path: ["hero"] }], - hasNext: true, - }, - { - pending: [ - { id: "1", path: ["hero", "friends", 0] }, - { id: "2", path: ["hero", "friends", 1] }, - { id: "3", path: ["hero", "friends", 2] }, - ], - incremental: [ - { - data: { name: "slow", friends: [{}, {}, {}] }, - id: "0", + hero: { + id: "1", }, - { data: { name: "Han" }, id: "1" }, - { data: { name: "Leia" }, id: "2" }, - { data: { name: "C-3PO" }, id: "3" }, - ], - completed: [{ id: "0" }, { id: "1" }, { id: "2" }, { id: "3" }], - hasNext: false, - }, - ]); - }); - it("Returns payloads from synchronous data in correct order", async () => { - const document = parse(` - query HeroNameQuery { - hero { - id - ...NameFragment @defer - } - } - fragment NameFragment on Hero { - name - friends { - ...NestedFragment @defer - } - } - fragment NestedFragment on Friend { - name + }, + }); + expect(request.hasNext).toBe(true); } - `); - const result = await run(document); - expectJSON(result).toDeepEqual([ - { + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { - hero: { id: "1" }, - }, - pending: [{ id: "0", path: ["hero"] }], - hasNext: true, - }, - { - pending: [ - { id: "1", path: ["hero", "friends", 0] }, - { id: "2", path: ["hero", "friends", 1] }, - { id: "3", path: ["hero", "friends", 2] }, - ], - incremental: [ - { - data: { - name: "Luke", - friends: [{}, {}, {}], - }, - id: "0", + hero: { + id: "1", + name: "slow", + friends: [{ name: "Han" }, { name: "Leia" }, { name: "C-3PO" }], }, - { data: { name: "Han" }, id: "1" }, - { data: { name: "Leia" }, id: "2" }, - { data: { name: "C-3PO" }, id: "3" }, - ], - completed: [{ id: "0" }, { id: "1" }, { id: "2" }, { id: "3" }], - hasNext: false, - }, - ]); + }, + }); + expect(request.hasNext).toBe(false); + } }); - it("Filters deferred payloads when a list item returned by an async iterable is nulled", async () => { - const document = parse(` - query { - hero { - friends { - nonNullName - ...NameFragment @defer - } - } - } - fragment NameFragment on Friend { - name - } - `); - const result = await run(document, { - hero: { - ...hero, - async *friends() { - yield await Promise.resolve({ - ...friends[0], - nonNullName: () => Promise.resolve(null), - }); - }, - }, - }); - expectJSON(result).toDeepEqual({ - data: { - hero: { - friends: [null], - }, - }, - errors: [ - { - message: - "Cannot return null for non-nullable field Friend.nonNullName.", - locations: [{ line: 5, column: 11 }], - path: ["hero", "friends", 0, "nonNullName"], - }, - ], - }); + it.skip("Returns payloads from synchronous data in correct order", async () => { + // from the client perspective, a repeat of the last one }); - it("original execute function throws error if anything is deferred and everything else is sync", () => { - const doc = ` - query Deferred { - ... 
@defer { hero { id } } - } - `; - expect(() => - execute({ - schema, - document: parse(doc), - rootValue: {}, - }) - ).to.throw( - "Executing this GraphQL operation would unexpectedly produce multiple payloads (due to @defer or @stream directive)" - ); + it.skip("Filters deferred payloads when a list item returned by an async iterable is nulled", async () => { + // from the client perspective, a regular query }); - it("original execute function resolves to error if anything is deferred and something else is async", async () => { - const doc = ` - query Deferred { - hero { name } - ... @defer { hero { id } } - } - `; - await expectPromise( - execute({ - schema, - document: parse(doc), - rootValue: { - hero: { - ...hero, - name: async () => { - await resolveOnNextTick(); - return "slow"; - }, - }, - }, - }) - ).toRejectWith( - "Executing this GraphQL operation would unexpectedly produce multiple payloads (due to @defer or @stream directive)" - ); + it.skip("original execute function throws error if anything is deferred and everything else is sync", () => { + // not relevant for the client + }); + + it.skip("original execute function resolves to error if anything is deferred and something else is async", async () => { + // not relevant for the client }); }); From 12a710c8e5a839e24f82629fa2431cd678a75524 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 00:04:28 -0600 Subject: [PATCH 005/254] Add additional tests --- .../__tests__/graphql17Alpha9.test.ts | 461 +++++++++++++++++- 1 file changed, 460 insertions(+), 1 deletion(-) diff --git a/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts b/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts index fe79c212075..94411e5b51b 100644 --- a/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts +++ b/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts @@ -15,7 +15,20 @@ import { GraphQLString, } from "graphql-17-alpha9"; -import { ApolloLink, gql, Observable } from "@apollo/client"; +import { + ApolloClient, + ApolloLink, + CombinedGraphQLErrors, + gql, + InMemoryCache, + NetworkStatus, + Observable, +} from "@apollo/client"; +import { + markAsStreaming, + mockDeferStream, + ObservableStream, +} from "@apollo/client/testing/internal"; import { GraphQL17Alpha9Handler, @@ -2286,3 +2299,449 @@ describe("graphql-js test cases", () => { // not relevant for the client }); }); + +test("GraphQL17Alpha9Handler can be used with `ApolloClient`", async () => { + const client = new ApolloClient({ + link: schemaLink, + cache: new InMemoryCache(), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + const query = gql` + query HeroNameQuery { + hero { + id + ... 
@defer { + name + } + } + } + `; + + const observableStream = new ObservableStream(client.watchQuery({ query })); + + await expect(observableStream).toEmitTypedValue({ + loading: true, + data: undefined, + dataState: "empty", + networkStatus: NetworkStatus.loading, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + loading: true, + data: markAsStreaming({ + hero: { + __typename: "Hero", + id: "1", + }, + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + loading: false, + data: { + hero: { + __typename: "Hero", + id: "1", + name: "Luke", + }, + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + partial: false, + }); +}); + +test("merges cache updates that happen concurrently", async () => { + const stream = mockDeferStream(); + const client = new ApolloClient({ + link: stream.httpLink, + cache: new InMemoryCache(), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + const query = gql` + query HeroNameQuery { + hero { + id + job + ... @defer { + name + } + } + } + `; + + const observableStream = new ObservableStream(client.watchQuery({ query })); + + await expect(observableStream).toEmitTypedValue({ + loading: true, + data: undefined, + dataState: "empty", + networkStatus: NetworkStatus.loading, + partial: true, + }); + + stream.enqueueInitialChunk({ + data: { + hero: { + __typename: "Hero", + id: "1", + job: "Farmer", + }, + }, + hasNext: true, + }); + + await expect(observableStream).toEmitTypedValue({ + loading: true, + data: markAsStreaming({ + hero: { + __typename: "Hero", + id: "1", + job: "Farmer", + }, + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + client.cache.writeFragment({ + id: "Hero:1", + fragment: gql` + fragment HeroJob on Hero { + job + } + `, + data: { + job: "Jedi", + }, + }); + + stream.enqueueSubsequentChunk({ + incremental: [ + { + data: { + name: "Luke", + }, + path: ["hero"], + }, + ], + hasNext: false, + }); + + await expect(observableStream).toEmitTypedValue({ + loading: false, + data: { + hero: { + __typename: "Hero", + id: "1", + job: "Jedi", // updated from cache + name: "Luke", + }, + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + partial: false, + }); +}); + +test("returns error on initial result", async () => { + const client = new ApolloClient({ + link: schemaLink, + cache: new InMemoryCache(), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + const query = gql` + query HeroNameQuery { + hero { + id + ... 
@defer { + name + } + errorField + } + } + `; + + const observableStream = new ObservableStream( + client.watchQuery({ query, errorPolicy: "all" }) + ); + + await expect(observableStream).toEmitTypedValue({ + loading: true, + data: undefined, + dataState: "empty", + networkStatus: NetworkStatus.loading, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + loading: true, + data: markAsStreaming({ + hero: { + __typename: "Hero", + id: "1", + errorField: null, + }, + }), + error: new CombinedGraphQLErrors({ + data: { + hero: { + __typename: "Hero", + id: "1", + errorField: null, + }, + }, + errors: [ + { + message: "bad", + path: ["hero", "errorField"], + }, + ], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + loading: false, + data: { + hero: { + __typename: "Hero", + id: "1", + errorField: null, + name: "Luke", + }, + }, + error: new CombinedGraphQLErrors({ + data: { + hero: { + __typename: "Hero", + id: "1", + errorField: null, + name: "Luke", + }, + }, + errors: [ + { + message: "bad", + path: ["hero", "errorField"], + }, + ], + }), + dataState: "complete", + networkStatus: NetworkStatus.error, + partial: false, + }); + + await expect(observableStream).not.toEmitAnything(); +}); + +test("stream that returns an error but continues to stream", async () => { + const client = new ApolloClient({ + link: schemaLink, + cache: new InMemoryCache(), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + const query = gql` + query HeroNameQuery { + hero { + id + ... @defer { + errorField + } + ... @defer { + slowField + } + } + } + `; + + const observableStream = new ObservableStream( + client.watchQuery({ query, errorPolicy: "all" }) + ); + + await expect(observableStream).toEmitTypedValue({ + loading: true, + data: undefined, + dataState: "empty", + networkStatus: NetworkStatus.loading, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + loading: true, + data: markAsStreaming({ + hero: { + __typename: "Hero", + id: "1", + }, + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + loading: true, + data: markAsStreaming({ + hero: { + __typename: "Hero", + id: "1", + errorField: null, + }, + }), + error: new CombinedGraphQLErrors({ + data: { + hero: { + __typename: "Hero", + id: "1", + errorField: null, + }, + }, + errors: [ + { + message: "bad", + path: ["hero", "errorField"], + }, + ], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + loading: false, + data: { + hero: { + __typename: "Hero", + id: "1", + errorField: null, + slowField: "slow", + }, + }, + error: new CombinedGraphQLErrors({ + data: { + hero: { + __typename: "Hero", + id: "1", + errorField: null, + slowField: "slow", + }, + }, + errors: [ + { + message: "bad", + path: ["hero", "errorField"], + }, + ], + }), + dataState: "complete", + networkStatus: NetworkStatus.error, + partial: false, + }); +}); + +test("handles final chunk of { hasNext: false } correctly in usage with Apollo Client", async () => { + const stream = mockDeferStream(); + const client = new ApolloClient({ + link: stream.httpLink, + cache: new InMemoryCache(), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + const query = gql` + query ProductsQuery { + allProducts { + id + nonNullErrorField + } + } 
+ `; + + const observableStream = new ObservableStream( + client.watchQuery({ query, errorPolicy: "all" }) + ); + stream.enqueueInitialChunk({ + data: { + allProducts: [null, null, null], + }, + errors: [ + { + message: + "Cannot return null for non-nullable field Product.nonNullErrorField.", + }, + { + message: + "Cannot return null for non-nullable field Product.nonNullErrorField.", + }, + { + message: + "Cannot return null for non-nullable field Product.nonNullErrorField.", + }, + ], + hasNext: true, + }); + + stream.enqueueSubsequentChunk({ + hasNext: false, + }); + + await expect(observableStream).toEmitTypedValue({ + loading: true, + data: undefined, + dataState: "empty", + networkStatus: NetworkStatus.loading, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + loading: true, + data: markAsStreaming({ + allProducts: [null, null, null], + }), + error: new CombinedGraphQLErrors({ + data: { + allProducts: [null, null, null], + }, + errors: [ + { + message: + "Cannot return null for non-nullable field Product.nonNullErrorField.", + }, + { + message: + "Cannot return null for non-nullable field Product.nonNullErrorField.", + }, + { + message: + "Cannot return null for non-nullable field Product.nonNullErrorField.", + }, + ], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + await expect(observableStream).toEmitSimilarValue({ + expected: (previous) => ({ + ...previous, + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.error, + partial: false, + }), + }); + await expect(observableStream).not.toEmitAnything(); +}); From 225edff77571c39f0b80770d6a1837021c6e2dc0 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 00:06:01 -0600 Subject: [PATCH 006/254] Create stub of GraphQL17Alpha9Handler --- src/incremental/handlers/graphql17Alpha9.ts | 53 +++++++++++++++++++++ src/incremental/index.ts | 1 + 2 files changed, 54 insertions(+) create mode 100644 src/incremental/handlers/graphql17Alpha9.ts diff --git a/src/incremental/handlers/graphql17Alpha9.ts b/src/incremental/handlers/graphql17Alpha9.ts new file mode 100644 index 00000000000..fd9a0bcd9d1 --- /dev/null +++ b/src/incremental/handlers/graphql17Alpha9.ts @@ -0,0 +1,53 @@ +import type { DocumentNode, GraphQLFormattedError } from "graphql"; + +import type { ApolloLink } from "@apollo/client"; +import type { HKT } from "@apollo/client/utilities"; +import { isNonEmptyArray } from "@apollo/client/utilities/internal"; + +import type { Incremental } from "../types.js"; + +export declare namespace GraphQL17Alpha9Handler { + interface GraphQL17Alpha9Result extends HKT { + arg1: unknown; // TData + arg2: unknown; // TExtensions + return: GraphQL17Alpha9Handler.Chunk>; + } + export interface TypeOverrides { + AdditionalApolloLinkResultTypes: GraphQL17Alpha9Result; + } + + export type InitialResult> = {}; + export type SubsequentResult> = {}; + + export type Chunk> = + | InitialResult + | SubsequentResult; +} + +export class GraphQL17Alpha9Handler> + implements Incremental.Handler> +{ + isIncrementalResult: ( + result: ApolloLink.Result + ) => result is GraphQL17Alpha9Handler.Chunk; + + prepareRequest: (request: ApolloLink.Request) => ApolloLink.Request; + + extractErrors: ( + result: ApolloLink.Result + ) => readonly GraphQLFormattedError[] | undefined | void; + + startRequest: >(request: { + query: DocumentNode; + }) => Incremental.IncrementalRequest< + GraphQL17Alpha9Handler.Chunk, + TData + >; +} + +// only exported for use in tests 
+export function hasIncrementalChunks( + result: Record +): result is Required { + return isNonEmptyArray(result.incremental); +} diff --git a/src/incremental/index.ts b/src/incremental/index.ts index c340efe8574..334b0dcc826 100644 --- a/src/incremental/index.ts +++ b/src/incremental/index.ts @@ -4,3 +4,4 @@ export { Defer20220824Handler, Defer20220824Handler as GraphQL17Alpha2Handler, } from "./handlers/defer20220824.js"; +export { GraphQL17Alpha9Handler } from "./handlers/graphql17Alpha9.js"; From fbbd08e51f5c98cd7be8eaed7070eb686bd7c45c Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 00:18:51 -0600 Subject: [PATCH 007/254] Add chunk types for alpha9 --- src/incremental/handlers/graphql17Alpha9.ts | 49 ++++++++++++++++++++- 1 file changed, 47 insertions(+), 2 deletions(-) diff --git a/src/incremental/handlers/graphql17Alpha9.ts b/src/incremental/handlers/graphql17Alpha9.ts index fd9a0bcd9d1..b0ce9b2bfae 100644 --- a/src/incremental/handlers/graphql17Alpha9.ts +++ b/src/incremental/handlers/graphql17Alpha9.ts @@ -12,12 +12,57 @@ export declare namespace GraphQL17Alpha9Handler { arg2: unknown; // TExtensions return: GraphQL17Alpha9Handler.Chunk>; } + export interface TypeOverrides { AdditionalApolloLinkResultTypes: GraphQL17Alpha9Result; } - export type InitialResult> = {}; - export type SubsequentResult> = {}; + export type InitialResult> = { + data: TData; + pending: ReadonlyArray; + hasNext: boolean; + extensions?: Record; + }; + + export type SubsequentResult> = { + hasNext: boolean; + pending?: ReadonlyArray; + incremental?: ReadonlyArray>; + completed?: ReadonlyArray; + extensions?: Record; + }; + + export interface PendingResult { + id: string; + path: Incremental.Path; + label?: string; + } + + export interface CompletedResult { + path: Incremental.Path; + label?: string; + errors?: ReadonlyArray; + } + + export interface IncrementalDeferResult> { + errors?: ReadonlyArray; + data: TData; + id: string; + subPath?: ReadonlyArray; + extensions?: Record; + } + + export interface IncrementalStreamResult> { + errors?: ReadonlyArray; + items: TData; + id: string; + subPath?: ReadonlyArray; + extensions?: Record; + } + + export type IncrementalResult> = + | IncrementalDeferResult + | IncrementalStreamResult; export type Chunk> = | InitialResult From 1626cf6a5a71c1b0af1dac764491d42f1945d4d6 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 00:30:15 -0600 Subject: [PATCH 008/254] Add stub implementations for abstract methods --- src/incremental/handlers/graphql17Alpha9.ts | 66 +++++++++++++++------ 1 file changed, 49 insertions(+), 17 deletions(-) diff --git a/src/incremental/handlers/graphql17Alpha9.ts b/src/incremental/handlers/graphql17Alpha9.ts index b0ce9b2bfae..648b43886e9 100644 --- a/src/incremental/handlers/graphql17Alpha9.ts +++ b/src/incremental/handlers/graphql17Alpha9.ts @@ -1,8 +1,15 @@ -import type { DocumentNode, GraphQLFormattedError } from "graphql"; - -import type { ApolloLink } from "@apollo/client"; -import type { HKT } from "@apollo/client/utilities"; -import { isNonEmptyArray } from "@apollo/client/utilities/internal"; +import type { + DocumentNode, + FormattedExecutionResult, + GraphQLFormattedError, +} from "graphql"; + +import type { ApolloLink } from "@apollo/client/link"; +import type { DeepPartial, HKT } from "@apollo/client/utilities"; +import { + hasDirectives, + isNonEmptyArray, +} from "@apollo/client/utilities/internal"; import type { Incremental } from "../types.js"; @@ -69,25 +76,50 @@ export declare namespace 
GraphQL17Alpha9Handler { | SubsequentResult; } +class IncrementalRequest> + implements + Incremental.IncrementalRequest, TData> +{ + hasNext = true; + + private data: any = {}; + + handle( + cacheData: TData | DeepPartial | null | undefined = this.data, + chunk: GraphQL17Alpha9Handler.Chunk + ): FormattedExecutionResult { + return { data: null }; + } +} + export class GraphQL17Alpha9Handler> - implements Incremental.Handler> + implements Incremental.Handler> { - isIncrementalResult: ( + isIncrementalResult( result: ApolloLink.Result - ) => result is GraphQL17Alpha9Handler.Chunk; + ): result is GraphQL17Alpha9Handler.Chunk { + return "hasNext" in result; + } - prepareRequest: (request: ApolloLink.Request) => ApolloLink.Request; + prepareRequest(request: ApolloLink.Request): ApolloLink.Request { + if (hasDirectives(["defer"], request.query)) { + const context = request.context ?? {}; + const http = (context.http ??= {}); + http.accept = ["multipart/mixed", ...(http.accept || [])]; - extractErrors: ( - result: ApolloLink.Result - ) => readonly GraphQLFormattedError[] | undefined | void; + request.context = context; + } + + return request; + } + + extractErrors(result: ApolloLink.Result) {} - startRequest: >(request: { + startRequest>(_: { query: DocumentNode; - }) => Incremental.IncrementalRequest< - GraphQL17Alpha9Handler.Chunk, - TData - >; + }) { + return new IncrementalRequest(); + } } // only exported for use in tests From 2681c698e9bc92451d92ebb30c6dc6f889b49eca Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 11:04:03 -0600 Subject: [PATCH 009/254] Update types to be more compatible with graphql-js --- src/incremental/handlers/graphql17Alpha9.ts | 19 ++++++++----------- 1 file changed, 8 insertions(+), 11 deletions(-) diff --git a/src/incremental/handlers/graphql17Alpha9.ts b/src/incremental/handlers/graphql17Alpha9.ts index 648b43886e9..e791a4cf3d2 100644 --- a/src/incremental/handlers/graphql17Alpha9.ts +++ b/src/incremental/handlers/graphql17Alpha9.ts @@ -26,12 +26,13 @@ export declare namespace GraphQL17Alpha9Handler { export type InitialResult> = { data: TData; + errors?: ReadonlyArray; pending: ReadonlyArray; hasNext: boolean; extensions?: Record; }; - export type SubsequentResult> = { + export type SubsequentResult = { hasNext: boolean; pending?: ReadonlyArray; incremental?: ReadonlyArray>; @@ -55,7 +56,7 @@ export declare namespace GraphQL17Alpha9Handler { errors?: ReadonlyArray; data: TData; id: string; - subPath?: ReadonlyArray; + subPath?: Incremental.Path; extensions?: Record; } @@ -63,20 +64,18 @@ export declare namespace GraphQL17Alpha9Handler { errors?: ReadonlyArray; items: TData; id: string; - subPath?: ReadonlyArray; + subPath?: Incremental.Path; extensions?: Record; } - export type IncrementalResult> = + export type IncrementalResult = | IncrementalDeferResult | IncrementalStreamResult; - export type Chunk> = - | InitialResult - | SubsequentResult; + export type Chunk = InitialResult | SubsequentResult; } -class IncrementalRequest> +class IncrementalRequest implements Incremental.IncrementalRequest, TData> { @@ -115,9 +114,7 @@ export class GraphQL17Alpha9Handler> extractErrors(result: ApolloLink.Result) {} - startRequest>(_: { - query: DocumentNode; - }) { + startRequest(_: { query: DocumentNode }) { return new IncrementalRequest(); } } From 6cb1c20d13c0b49f64ebea0df3eedf770d585659 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 11:20:18 -0600 Subject: [PATCH 010/254] Fix type of completed result --- 
src/incremental/handlers/graphql17Alpha9.ts | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/incremental/handlers/graphql17Alpha9.ts b/src/incremental/handlers/graphql17Alpha9.ts index e791a4cf3d2..7515f8a79ac 100644 --- a/src/incremental/handlers/graphql17Alpha9.ts +++ b/src/incremental/handlers/graphql17Alpha9.ts @@ -47,8 +47,7 @@ export declare namespace GraphQL17Alpha9Handler { } export interface CompletedResult { - path: Incremental.Path; - label?: string; + id: string; errors?: ReadonlyArray; } From ba85d93787544630ba91fa304723b0b71f8e2a53 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 11:29:03 -0600 Subject: [PATCH 011/254] Add initial implementation of merging --- src/incremental/handlers/graphql17Alpha9.ts | 74 ++++++++++++++++++++- 1 file changed, 72 insertions(+), 2 deletions(-) diff --git a/src/incremental/handlers/graphql17Alpha9.ts b/src/incremental/handlers/graphql17Alpha9.ts index 7515f8a79ac..0d20a5652f0 100644 --- a/src/incremental/handlers/graphql17Alpha9.ts +++ b/src/incremental/handlers/graphql17Alpha9.ts @@ -6,10 +6,12 @@ import type { import type { ApolloLink } from "@apollo/client/link"; import type { DeepPartial, HKT } from "@apollo/client/utilities"; +import { DeepMerger } from "@apollo/client/utilities/internal"; import { hasDirectives, isNonEmptyArray, } from "@apollo/client/utilities/internal"; +import { invariant } from "@apollo/client/utilities/invariant"; import type { Incremental } from "../types.js"; @@ -81,16 +83,84 @@ class IncrementalRequest hasNext = true; private data: any = {}; + private errors: GraphQLFormattedError[] = []; + private extensions: Record = {}; + private pending: GraphQL17Alpha9Handler.PendingResult[] = []; + private merger = new DeepMerger(); handle( cacheData: TData | DeepPartial | null | undefined = this.data, chunk: GraphQL17Alpha9Handler.Chunk ): FormattedExecutionResult { - return { data: null }; + this.hasNext = chunk.hasNext; + this.data = cacheData; + + if (chunk.pending) { + this.pending.push(...chunk.pending); + } + + this.mergeIn(chunk); + + if (hasIncrementalChunks(chunk)) { + for (const incremental of chunk.incremental) { + // TODO: Implement support for `@stream`. For now we will skip handling + // streamed responses + if ("items" in incremental) { + continue; + } + + const pending = this.pending.find(({ id }) => incremental.id === id); + invariant( + pending, + "Could not find pending chunk for incremental value. Please file an issue because this is a bug in Apollo Client." + ); + + let { data } = incremental; + const { path } = pending; + + for (let i = path.length - 1; i >= 0; i--) { + const key = path[i]; + const parent: Record = + typeof key === "number" ? 
[] : {}; + parent[key] = incremental.data; + data = parent as typeof data; + } + + this.mergeIn({ + data: data as TData, + extensions: incremental.extensions, + }); + + for (const completed of chunk.completed) { + const index = this.pending.findIndex(({ id }) => id === completed.id); + this.pending.splice(index, 1); + } + } + } + + const result: FormattedExecutionResult = { data: this.data }; + + if (isNonEmptyArray(this.errors)) { + result.errors = this.errors; + } + + if (Object.keys(this.extensions).length > 0) { + result.extensions = this.extensions; + } + + return result; + } + + private mergeIn(normalized: FormattedExecutionResult) { + if (normalized.data !== undefined) { + this.data = this.merger.merge(this.data, normalized.data); + } + + Object.assign(this.extensions, normalized.extensions); } } -export class GraphQL17Alpha9Handler> +export class GraphQL17Alpha9Handler implements Incremental.Handler> { isIncrementalResult( From 6d8c443767505c5ffb3a6f73a23c159807e4c0b9 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 11:36:45 -0600 Subject: [PATCH 012/254] Add patch for types in v17-alpha9 --- patches/graphql-17-alpha9+17.0.0-alpha.9.patch | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) create mode 100644 patches/graphql-17-alpha9+17.0.0-alpha.9.patch diff --git a/patches/graphql-17-alpha9+17.0.0-alpha.9.patch b/patches/graphql-17-alpha9+17.0.0-alpha.9.patch new file mode 100644 index 00000000000..591af1a11f4 --- /dev/null +++ b/patches/graphql-17-alpha9+17.0.0-alpha.9.patch @@ -0,0 +1,16 @@ +diff --git a/node_modules/graphql-17-alpha9/execution/types.d.ts b/node_modules/graphql-17-alpha9/execution/types.d.ts +index 48ef2e9..6ef2ab3 100644 +--- a/node_modules/graphql-17-alpha9/execution/types.d.ts ++++ b/node_modules/graphql-17-alpha9/execution/types.d.ts +@@ -95,9 +95,8 @@ export interface CompletedResult { + errors?: ReadonlyArray; + } + export interface FormattedCompletedResult { +- path: ReadonlyArray; +- label?: string; +- errors?: ReadonlyArray; ++ id: string; ++ errors?: ReadonlyArray; + } + export declare function isPendingExecutionGroup(incrementalDataRecord: IncrementalDataRecord): incrementalDataRecord is PendingExecutionGroup; + export type CompletedExecutionGroup = SuccessfulExecutionGroup | FailedExecutionGroup; From 9cd60c446da9c96b0eae577bf382f1c177377156 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 11:37:25 -0600 Subject: [PATCH 013/254] Add return type for async generator for run function --- src/incremental/handlers/__tests__/graphql17Alpha9.test.ts | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts b/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts index 94411e5b51b..1519bbdede7 100644 --- a/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts +++ b/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts @@ -2,6 +2,7 @@ import assert from "node:assert"; import type { DocumentNode, + FormattedExecutionResult, FormattedInitialIncrementalExecutionResult, FormattedSubsequentIncrementalExecutionResult, } from "graphql-17-alpha9"; @@ -175,11 +176,12 @@ function promiseWithResolvers(): { async function* run( document: DocumentNode, - rootValue: unknown = { hero }, + rootValue: Record = { hero }, enableEarlyExecution = false ): AsyncGenerator< | FormattedInitialIncrementalExecutionResult - | FormattedSubsequentIncrementalExecutionResult + | FormattedSubsequentIncrementalExecutionResult, + FormattedExecutionResult | void > { const 
result = await experimentalExecuteIncrementally({ schema, From b039ee4e627300a9b7fa5ab64486b91292829971 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 11:39:01 -0600 Subject: [PATCH 014/254] Add errors when merging --- src/incremental/handlers/graphql17Alpha9.ts | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/incremental/handlers/graphql17Alpha9.ts b/src/incremental/handlers/graphql17Alpha9.ts index 0d20a5652f0..87081bf96a9 100644 --- a/src/incremental/handlers/graphql17Alpha9.ts +++ b/src/incremental/handlers/graphql17Alpha9.ts @@ -156,6 +156,10 @@ class IncrementalRequest this.data = this.merger.merge(this.data, normalized.data); } + if (normalized.errors) { + this.errors.push(...normalized.errors); + } + Object.assign(this.extensions, normalized.extensions); } } From 74cb0a436788ce7490168d4afae37fd205430127 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 11:46:00 -0600 Subject: [PATCH 015/254] Ensure errors are merged from incremental results --- src/incremental/handlers/graphql17Alpha9.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/src/incremental/handlers/graphql17Alpha9.ts b/src/incremental/handlers/graphql17Alpha9.ts index 87081bf96a9..1a7dff2d29b 100644 --- a/src/incremental/handlers/graphql17Alpha9.ts +++ b/src/incremental/handlers/graphql17Alpha9.ts @@ -129,6 +129,7 @@ class IncrementalRequest this.mergeIn({ data: data as TData, extensions: incremental.extensions, + errors: incremental.errors, }); for (const completed of chunk.completed) { From c17707fed2902d6d270786d0c404bb87d7a79ec0 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 11:48:41 -0600 Subject: [PATCH 016/254] Iterate completed after merging all incremental items --- src/incremental/handlers/graphql17Alpha9.ts | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/src/incremental/handlers/graphql17Alpha9.ts b/src/incremental/handlers/graphql17Alpha9.ts index 1a7dff2d29b..4ca5115b8ca 100644 --- a/src/incremental/handlers/graphql17Alpha9.ts +++ b/src/incremental/handlers/graphql17Alpha9.ts @@ -131,11 +131,13 @@ class IncrementalRequest extensions: incremental.extensions, errors: incremental.errors, }); + } + } - for (const completed of chunk.completed) { - const index = this.pending.findIndex(({ id }) => id === completed.id); - this.pending.splice(index, 1); - } + if ("completed" in chunk && chunk.completed) { + for (const completed of chunk.completed) { + const index = this.pending.findIndex(({ id }) => id === completed.id); + this.pending.splice(index, 1); } } From e7e11b7e3b5827b6b6ac6f073b9845679c48ef80 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 11:49:19 -0600 Subject: [PATCH 017/254] Remove locations from error --- src/incremental/handlers/__tests__/graphql17Alpha9.test.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts b/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts index 1519bbdede7..b5200f503ba 100644 --- a/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts +++ b/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts @@ -402,7 +402,6 @@ describe("graphql-js test cases", () => { errors: [ { message: "bad", - locations: [{ line: 7, column: 11 }], path: ["hero", "name"], }, ], From 21042b935938c555adfb7834102c631ba51962a5 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 12:21:47 -0600 Subject: [PATCH 018/254] Fix incorrect data merged in --- src/incremental/handlers/graphql17Alpha9.ts | 2 +- 1 file 
changed, 1 insertion(+), 1 deletion(-) diff --git a/src/incremental/handlers/graphql17Alpha9.ts b/src/incremental/handlers/graphql17Alpha9.ts index 4ca5115b8ca..c8dd5cdcd72 100644 --- a/src/incremental/handlers/graphql17Alpha9.ts +++ b/src/incremental/handlers/graphql17Alpha9.ts @@ -122,7 +122,7 @@ class IncrementalRequest const key = path[i]; const parent: Record = typeof key === "number" ? [] : {}; - parent[key] = incremental.data; + parent[key] = data; data = parent as typeof data; } From 53bd6ce826ba067d84dc8b8b03a8e60efeb23cff Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 12:25:12 -0600 Subject: [PATCH 019/254] Merge errors in completed array --- src/incremental/handlers/graphql17Alpha9.ts | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/incremental/handlers/graphql17Alpha9.ts b/src/incremental/handlers/graphql17Alpha9.ts index c8dd5cdcd72..97500f27f01 100644 --- a/src/incremental/handlers/graphql17Alpha9.ts +++ b/src/incremental/handlers/graphql17Alpha9.ts @@ -138,6 +138,10 @@ class IncrementalRequest for (const completed of chunk.completed) { const index = this.pending.findIndex(({ id }) => id === completed.id); this.pending.splice(index, 1); + + if (completed.errors) { + this.errors.push(...completed.errors); + } } } From 978228af44e654b39bd29fd2cf17f8d5920d02e9 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 12:26:08 -0600 Subject: [PATCH 020/254] Remove locations in error tests --- .../handlers/__tests__/graphql17Alpha9.test.ts | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts b/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts index b5200f503ba..8df4f57e63b 100644 --- a/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts +++ b/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts @@ -1556,7 +1556,6 @@ describe("graphql-js test cases", () => { { message: "Cannot return null for non-nullable field c.nonNullErrorField.", - locations: [{ line: 8, column: 17 }], path: ["a", "b", "c", "nonNullErrorField"], }, ], @@ -1631,7 +1630,6 @@ describe("graphql-js test cases", () => { { message: "Cannot return null for non-nullable field c.nonNullErrorField.", - locations: [{ line: 17, column: 17 }], path: ["a", "b", "c", "nonNullErrorField"], }, ], @@ -1696,13 +1694,11 @@ describe("graphql-js test cases", () => { { message: "Cannot return null for non-nullable field c.nonNullErrorField.", - locations: [{ line: 7, column: 17 }], path: ["a", "b", "c", "someError"], }, { message: "Cannot return null for non-nullable field c.nonNullErrorField.", - locations: [{ line: 16, column: 17 }], path: ["a", "b", "c", "anotherError"], }, ], @@ -1780,7 +1776,6 @@ describe("graphql-js test cases", () => { { message: "Cannot return null for non-nullable field c.nonNullErrorField.", - locations: [{ line: 19, column: 17 }], path: ["a", "b", "someC", "someError"], }, ], @@ -1867,7 +1862,6 @@ describe("graphql-js test cases", () => { { message: "Cannot return null for non-nullable field c.nonNullErrorField.", - locations: [{ line: 8, column: 17 }], path: ["a", "b", "c", "nonNullErrorField"], }, ], @@ -1932,7 +1926,6 @@ describe("graphql-js test cases", () => { { message: "Cannot return null for non-nullable field Hero.nonNullName.", - locations: [{ line: 5, column: 13 }], path: ["hero", "nonNullName"], }, ], @@ -2079,7 +2072,6 @@ describe("graphql-js test cases", () => { errors: [ { message: "bad", - locations: [{ line: 9, column: 9 }], path: ["hero", "name"], }, ], @@ -2142,7 
+2134,6 @@ describe("graphql-js test cases", () => { { message: "Cannot return null for non-nullable field Hero.nonNullName.", - locations: [{ line: 9, column: 9 }], path: ["hero", "nonNullName"], }, ], @@ -2209,7 +2200,6 @@ describe("graphql-js test cases", () => { { message: "Cannot return null for non-nullable field Hero.nonNullName.", - locations: [{ line: 9, column: 9 }], path: ["hero", "nonNullName"], }, ], From 706bda0d0821165b996cfc738630fdbd74d9760b Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 12:29:06 -0600 Subject: [PATCH 021/254] Fix incorrect assertion --- src/incremental/handlers/__tests__/graphql17Alpha9.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts b/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts index 8df4f57e63b..2df5e73789c 100644 --- a/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts +++ b/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts @@ -1687,7 +1687,7 @@ describe("graphql-js test cases", () => { assert(!done); expect(handler.isIncrementalResult(chunk)).toBe(true); - expect(hasIncrementalChunks(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: {}, errors: [ From e6731f844d925e5b2a1a8ce54c36290c3c4d8a88 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 12:33:40 -0600 Subject: [PATCH 022/254] Fix missing assertion in test --- .../handlers/__tests__/graphql17Alpha9.test.ts | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts b/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts index 2df5e73789c..7cbbb743a0f 100644 --- a/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts +++ b/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts @@ -1850,6 +1850,24 @@ describe("graphql-js test cases", () => { assert(!done); expect(handler.isIncrementalResult(chunk)).toBe(true); expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + a: { + b: { + c: { d: "d" }, + }, + }, + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + expect(handler.isIncrementalResult(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { a: { From dba62d4d1822cd49e01a24666d006e21a90bd113 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 12:36:16 -0600 Subject: [PATCH 023/254] Fix incorrect assertion --- src/incremental/handlers/__tests__/graphql17Alpha9.test.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts b/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts index 7cbbb743a0f..3e89dbe6169 100644 --- a/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts +++ b/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts @@ -2129,7 +2129,7 @@ describe("graphql-js test cases", () => { expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { hero: { - name: "Luke", + id: "1", }, }, }); @@ -2141,7 +2141,7 @@ describe("graphql-js test cases", () => { assert(!done); expect(handler.isIncrementalResult(chunk)).toBe(true); - expect(hasIncrementalChunks(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(false); 
expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { hero: { @@ -2207,7 +2207,7 @@ describe("graphql-js test cases", () => { assert(!done); expect(handler.isIncrementalResult(chunk)).toBe(true); - expect(hasIncrementalChunks(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { hero: { From ee5cb45ccb0062f77f52f0599405fa8e4b5565f7 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 12:48:47 -0600 Subject: [PATCH 024/254] Update test to use actual schema fields --- .../__tests__/graphql17Alpha9.test.ts | 53 +++++++++++-------- 1 file changed, 31 insertions(+), 22 deletions(-) diff --git a/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts b/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts index 3e89dbe6169..40e73dddca2 100644 --- a/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts +++ b/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts @@ -205,16 +205,18 @@ async function* run( } } -const schemaLink = new ApolloLink((operation) => { - return new Observable((observer) => { - void (async () => { - for await (const chunk of run(operation.query)) { - observer.next(chunk); - } - observer.complete(); - })(); +function createSchemaLink(rootValue?: Record) { + return new ApolloLink((operation) => { + return new Observable((observer) => { + void (async () => { + for await (const chunk of run(operation.query, rootValue)) { + observer.next(chunk); + } + observer.complete(); + })(); + }); }); -}); +} describe("graphql-js test cases", () => { // These test cases mirror defer tests of the `graphql-js` v17.0.0-alpha.9 release: @@ -2556,7 +2558,16 @@ test("returns error on initial result", async () => { test("stream that returns an error but continues to stream", async () => { const client = new ApolloClient({ - link: schemaLink, + link: createSchemaLink({ + hero: { + ...hero, + nonNullName: null, + name: async () => { + await wait(100); + return "slow"; + }, + }, + }), cache: new InMemoryCache(), incrementalHandler: new GraphQL17Alpha9Handler(), }); @@ -2566,10 +2577,10 @@ test("stream that returns an error but continues to stream", async () => { hero { id ... @defer { - errorField + nonNullName } ... 
@defer { - slowField + name } } } @@ -2606,7 +2617,6 @@ test("stream that returns an error but continues to stream", async () => { hero: { __typename: "Hero", id: "1", - errorField: null, }, }), error: new CombinedGraphQLErrors({ @@ -2614,13 +2624,13 @@ test("stream that returns an error but continues to stream", async () => { hero: { __typename: "Hero", id: "1", - errorField: null, }, }, errors: [ { - message: "bad", - path: ["hero", "errorField"], + message: + "Cannot return null for non-nullable field Hero.nonNullName.", + path: ["hero", "nonNullName"], }, ], }), @@ -2635,8 +2645,7 @@ test("stream that returns an error but continues to stream", async () => { hero: { __typename: "Hero", id: "1", - errorField: null, - slowField: "slow", + name: "slow", }, }, error: new CombinedGraphQLErrors({ @@ -2644,14 +2653,14 @@ test("stream that returns an error but continues to stream", async () => { hero: { __typename: "Hero", id: "1", - errorField: null, - slowField: "slow", + name: "slow", }, }, errors: [ { - message: "bad", - path: ["hero", "errorField"], + message: + "Cannot return null for non-nullable field Hero.nonNullName.", + path: ["hero", "nonNullName"], }, ], }), From 33ba203cf43ad96686de750859ba6d20b35096bb Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 13:10:10 -0600 Subject: [PATCH 025/254] Yield a regular result instead of return --- .../__tests__/graphql17Alpha9.test.ts | 141 +++++++++--------- 1 file changed, 72 insertions(+), 69 deletions(-) diff --git a/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts b/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts index 40e73dddca2..5af5cde789a 100644 --- a/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts +++ b/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts @@ -29,6 +29,7 @@ import { markAsStreaming, mockDeferStream, ObservableStream, + wait, } from "@apollo/client/testing/internal"; import { @@ -180,8 +181,9 @@ async function* run( enableEarlyExecution = false ): AsyncGenerator< | FormattedInitialIncrementalExecutionResult - | FormattedSubsequentIncrementalExecutionResult, - FormattedExecutionResult | void + | FormattedSubsequentIncrementalExecutionResult + | FormattedExecutionResult, + void > { const result = await experimentalExecuteIncrementally({ schema, @@ -201,7 +203,7 @@ async function* run( ) as FormattedSubsequentIncrementalExecutionResult; } } else { - return result; + yield result; } } @@ -244,7 +246,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -260,7 +262,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -329,7 +331,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: {}, @@ -341,7 +343,7 @@ describe("graphql-js test 
cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -381,7 +383,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: {}, @@ -393,7 +395,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -439,7 +441,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -453,7 +455,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -495,7 +497,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -511,7 +513,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -551,7 +553,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -565,7 +567,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -601,7 +603,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -615,7 +617,7 @@ describe("graphql-js test cases", () => { const { 
value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -654,7 +656,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: {}, @@ -666,7 +668,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -709,7 +711,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -723,7 +725,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -759,7 +761,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: {}, @@ -771,7 +773,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -833,7 +835,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: {}, @@ -845,7 +847,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -865,7 +867,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -930,7 +932,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = 
await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: {}, @@ -942,7 +944,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -958,7 +960,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -1010,7 +1012,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -1026,7 +1028,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -1086,7 +1088,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -1111,7 +1113,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -1163,7 +1165,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -1177,7 +1179,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -1244,7 +1246,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -1264,7 +1266,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await 
incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -1320,7 +1322,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -1338,7 +1340,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -1403,7 +1405,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -1421,7 +1423,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -1468,7 +1470,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: {}, @@ -1480,7 +1482,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -1530,7 +1532,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -1544,7 +1546,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -1604,7 +1606,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -1618,7 +1620,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await 
incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -1676,7 +1678,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: {}, @@ -1688,7 +1690,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: {}, @@ -1751,7 +1753,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: {}, @@ -1763,7 +1765,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -1836,7 +1838,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -1850,7 +1852,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -1868,7 +1870,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -1924,7 +1926,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: {}, @@ -1936,7 +1938,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -1990,7 +1992,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await 
incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -2006,7 +2008,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -2064,7 +2066,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -2080,7 +2082,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -2126,7 +2128,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -2142,7 +2144,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -2192,7 +2194,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -2208,7 +2210,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -2263,7 +2265,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -2279,7 +2281,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = await incoming.next(); assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -2313,7 +2315,7 @@ describe("graphql-js test cases", () => { test("GraphQL17Alpha9Handler can be used 
with `ApolloClient`", async () => { const client = new ApolloClient({ - link: schemaLink, + link: createSchemaLink(), cache: new InMemoryCache(), incrementalHandler: new GraphQL17Alpha9Handler(), }); @@ -2367,7 +2369,8 @@ test("GraphQL17Alpha9Handler can be used with `ApolloClient`", async () => { }); }); -test("merges cache updates that happen concurrently", async () => { +// TODO: Add test helpers for new format +test.failing("merges cache updates that happen concurrently", async () => { const stream = mockDeferStream(); const client = new ApolloClient({ link: stream.httpLink, @@ -2464,7 +2467,7 @@ test("merges cache updates that happen concurrently", async () => { test("returns error on initial result", async () => { const client = new ApolloClient({ - link: schemaLink, + link: createSchemaLink(), cache: new InMemoryCache(), incrementalHandler: new GraphQL17Alpha9Handler(), }); From 11823bd9c7911dd10076d2bdf227245829986d9e Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 13:18:49 -0600 Subject: [PATCH 026/254] Ensure errors are serialized from helper --- src/incremental/handlers/__tests__/graphql17Alpha9.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts b/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts index 5af5cde789a..65e66d1138f 100644 --- a/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts +++ b/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts @@ -203,7 +203,7 @@ async function* run( ) as FormattedSubsequentIncrementalExecutionResult; } } else { - yield result; + yield JSON.parse(JSON.stringify(result)) as FormattedExecutionResult; } } From 40213fa97d650b16daf3e8f33872016012906270 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 13:21:28 -0600 Subject: [PATCH 027/254] Update test to match incremental behavior --- .../__tests__/graphql17Alpha9.test.ts | 57 ++++--------------- 1 file changed, 12 insertions(+), 45 deletions(-) diff --git a/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts b/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts index 65e66d1138f..6dcd5467ec9 100644 --- a/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts +++ b/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts @@ -2467,7 +2467,12 @@ test.failing("merges cache updates that happen concurrently", async () => { test("returns error on initial result", async () => { const client = new ApolloClient({ - link: createSchemaLink(), + link: createSchemaLink({ + hero: { + ...hero, + nonNullName: null, + }, + }), cache: new InMemoryCache(), incrementalHandler: new GraphQL17Alpha9Handler(), }); @@ -2479,7 +2484,7 @@ test("returns error on initial result", async () => { ... 
@defer { name } - errorField + nonNullName } } `; @@ -2496,58 +2501,20 @@ test("returns error on initial result", async () => { partial: true, }); - await expect(observableStream).toEmitTypedValue({ - loading: true, - data: markAsStreaming({ - hero: { - __typename: "Hero", - id: "1", - errorField: null, - }, - }), - error: new CombinedGraphQLErrors({ - data: { - hero: { - __typename: "Hero", - id: "1", - errorField: null, - }, - }, - errors: [ - { - message: "bad", - path: ["hero", "errorField"], - }, - ], - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - partial: true, - }); - await expect(observableStream).toEmitTypedValue({ loading: false, data: { - hero: { - __typename: "Hero", - id: "1", - errorField: null, - name: "Luke", - }, + hero: null, }, error: new CombinedGraphQLErrors({ data: { - hero: { - __typename: "Hero", - id: "1", - errorField: null, - name: "Luke", - }, + hero: null, }, errors: [ { - message: "bad", - path: ["hero", "errorField"], + message: + "Cannot return null for non-nullable field Hero.nonNullName.", + path: ["hero", "nonNullName"], }, ], }), From 6e6ff31f2fbe58b7438e35a70d0917cd251393fd Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 13:24:16 -0600 Subject: [PATCH 028/254] Update how path is calculated --- src/incremental/handlers/graphql17Alpha9.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/incremental/handlers/graphql17Alpha9.ts b/src/incremental/handlers/graphql17Alpha9.ts index 97500f27f01..91a50582f48 100644 --- a/src/incremental/handlers/graphql17Alpha9.ts +++ b/src/incremental/handlers/graphql17Alpha9.ts @@ -116,7 +116,7 @@ class IncrementalRequest ); let { data } = incremental; - const { path } = pending; + const path = pending.path.concat(incremental.subPath ?? 
[]); for (let i = path.length - 1; i >= 0; i--) { const key = path[i]; From fa0b6878c2a0cb793a7a32ab015154f1b6366f80 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 13:25:59 -0600 Subject: [PATCH 029/254] Temp skip test --- src/incremental/handlers/__tests__/graphql17Alpha9.test.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts b/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts index 6dcd5467ec9..fcf37cdf4bb 100644 --- a/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts +++ b/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts @@ -2640,7 +2640,8 @@ test("stream that returns an error but continues to stream", async () => { }); }); -test("handles final chunk of { hasNext: false } correctly in usage with Apollo Client", async () => { +// TODO: Update to use test utils with updated types +test.skip("handles final chunk of { hasNext: false } correctly in usage with Apollo Client", async () => { const stream = mockDeferStream(); const client = new ApolloClient({ link: stream.httpLink, From ec9792f9e4ac142816bd2d600f7b695ab16ebe98 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 13:26:58 -0600 Subject: [PATCH 030/254] Move test to /defer.test.ts --- .../{graphql17Alpha9.test.ts => graphql17Alpha9/defer.test.ts} | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) rename src/incremental/handlers/__tests__/{graphql17Alpha9.test.ts => graphql17Alpha9/defer.test.ts} (99%) diff --git a/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts b/src/incremental/handlers/__tests__/graphql17Alpha9/defer.test.ts similarity index 99% rename from src/incremental/handlers/__tests__/graphql17Alpha9.test.ts rename to src/incremental/handlers/__tests__/graphql17Alpha9/defer.test.ts index fcf37cdf4bb..888ace8cea2 100644 --- a/src/incremental/handlers/__tests__/graphql17Alpha9.test.ts +++ b/src/incremental/handlers/__tests__/graphql17Alpha9/defer.test.ts @@ -36,7 +36,7 @@ import { GraphQL17Alpha9Handler, hasIncrementalChunks, // eslint-disable-next-line local-rules/no-relative-imports -} from "../graphql17Alpha9.js"; +} from "../../graphql17Alpha9.js"; // This is the test setup of the `graphql-js` v17.0.0-alpha.9 release: // https://github.com/graphql/graphql-js/blob/3283f8adf52e77a47f148ff2f30185c8d11ff0f0/src/execution/__tests__/defer-test.ts From 8d61c1d37f9759ceb050944d040d304b356e5475 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 13:39:05 -0600 Subject: [PATCH 031/254] Update extractErrors --- src/incremental/handlers/graphql17Alpha9.ts | 23 ++++++++++++++++++++- 1 file changed, 22 insertions(+), 1 deletion(-) diff --git a/src/incremental/handlers/graphql17Alpha9.ts b/src/incremental/handlers/graphql17Alpha9.ts index 91a50582f48..5a2df6a0580 100644 --- a/src/incremental/handlers/graphql17Alpha9.ts +++ b/src/incremental/handlers/graphql17Alpha9.ts @@ -192,7 +192,28 @@ export class GraphQL17Alpha9Handler return request; } - extractErrors(result: ApolloLink.Result) {} + extractErrors(result: ApolloLink.Result) { + const acc: GraphQLFormattedError[] = []; + const push = ({ + errors, + }: { + errors?: ReadonlyArray; + }) => { + if (errors) { + acc.push(...errors); + } + }; + + push(result); + + if (this.isIncrementalResult(result)) { + push(new IncrementalRequest().handle(undefined, result)); + } + + if (acc.length) { + return acc; + } + } startRequest(_: { query: DocumentNode }) { return new IncrementalRequest(); From 
2610442af45cc0c43d3cb247d416cb7d393b761f Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 13:43:30 -0600 Subject: [PATCH 032/254] Rename merge --- src/incremental/handlers/graphql17Alpha9.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/incremental/handlers/graphql17Alpha9.ts b/src/incremental/handlers/graphql17Alpha9.ts index 5a2df6a0580..5e1e73cbc88 100644 --- a/src/incremental/handlers/graphql17Alpha9.ts +++ b/src/incremental/handlers/graphql17Alpha9.ts @@ -99,7 +99,7 @@ class IncrementalRequest this.pending.push(...chunk.pending); } - this.mergeIn(chunk); + this.merge(chunk); if (hasIncrementalChunks(chunk)) { for (const incremental of chunk.incremental) { @@ -126,7 +126,7 @@ class IncrementalRequest data = parent as typeof data; } - this.mergeIn({ + this.merge({ data: data as TData, extensions: incremental.extensions, errors: incremental.errors, @@ -158,7 +158,7 @@ class IncrementalRequest return result; } - private mergeIn(normalized: FormattedExecutionResult) { + private merge(normalized: FormattedExecutionResult) { if (normalized.data !== undefined) { this.data = this.merger.merge(this.data, normalized.data); } From dbf5b7f80946242acef9a2734363a9c56592b202 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 13:45:17 -0600 Subject: [PATCH 033/254] Make types mirror defer implementation --- src/incremental/handlers/graphql17Alpha9.ts | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/incremental/handlers/graphql17Alpha9.ts b/src/incremental/handlers/graphql17Alpha9.ts index 5e1e73cbc88..c8934c97fab 100644 --- a/src/incremental/handlers/graphql17Alpha9.ts +++ b/src/incremental/handlers/graphql17Alpha9.ts @@ -171,12 +171,14 @@ class IncrementalRequest } } -export class GraphQL17Alpha9Handler +export class GraphQL17Alpha9Handler implements Incremental.Handler> { isIncrementalResult( result: ApolloLink.Result - ): result is GraphQL17Alpha9Handler.Chunk { + ): result is + | GraphQL17Alpha9Handler.InitialResult + | GraphQL17Alpha9Handler.SubsequentResult { return "hasNext" in result; } From 723c163a11106fc31ecdf5ee892f191cdc065493 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 13:46:01 -0600 Subject: [PATCH 034/254] Update exports snapshot --- src/__tests__/__snapshots__/exports.ts.snap | 1 + 1 file changed, 1 insertion(+) diff --git a/src/__tests__/__snapshots__/exports.ts.snap b/src/__tests__/__snapshots__/exports.ts.snap index c7343506bff..6505b8a0721 100644 --- a/src/__tests__/__snapshots__/exports.ts.snap +++ b/src/__tests__/__snapshots__/exports.ts.snap @@ -145,6 +145,7 @@ exports[`exports of public entry points @apollo/client/incremental 1`] = ` Array [ "Defer20220824Handler", "GraphQL17Alpha2Handler", + "GraphQL17Alpha9Handler", "NotImplementedHandler", ] `; From a12de86ff025c34946c5e7fc896cb42499f7c048 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 16:22:27 -0600 Subject: [PATCH 035/254] Split out helper for mocking an incremental stream --- src/testing/internal/incremental/utils.ts | 103 ++++++++++++++++++++++ 1 file changed, 103 insertions(+) create mode 100644 src/testing/internal/incremental/utils.ts diff --git a/src/testing/internal/incremental/utils.ts b/src/testing/internal/incremental/utils.ts new file mode 100644 index 00000000000..2d6be2627ac --- /dev/null +++ b/src/testing/internal/incremental/utils.ts @@ -0,0 +1,103 @@ +import { + ReadableStream as NodeReadableStream, + TextEncoderStream, + TransformStream, +} from "node:stream/web"; + +import { 
HttpLink } from "@apollo/client/link/http"; + +const hasNextSymbol = Symbol("hasNext"); + +export function mockIncrementalStream({ + responseHeaders, +}: { + responseHeaders: Headers; +}) { + type Payload = Chunks & { [hasNextSymbol]: boolean }; + const CLOSE = Symbol(); + let streamController: ReadableStreamDefaultController | null = null; + let sentInitialChunk = false; + + const queue: Array = []; + + function processQueue() { + if (!streamController) { + throw new Error("Cannot process queue without stream controller"); + } + + let chunk; + while ((chunk = queue.shift())) { + if (chunk === CLOSE) { + streamController.close(); + } else { + streamController.enqueue(chunk); + } + } + } + + function createStream() { + return new NodeReadableStream({ + start(c) { + streamController = c; + processQueue(); + }, + }) + .pipeThrough( + new TransformStream({ + transform: (chunk, controller) => { + controller.enqueue( + (!sentInitialChunk ? "\r\n---\r\n" : "") + + "content-type: application/json; charset=utf-8\r\n\r\n" + + JSON.stringify(chunk) + + (chunk[hasNextSymbol] ? "\r\n---\r\n" : "\r\n-----\r\n") + ); + sentInitialChunk = true; + }, + }) + ) + .pipeThrough(new TextEncoderStream()); + } + + const httpLink = new HttpLink({ + fetch(input, init) { + return Promise.resolve( + new Response( + createStream() satisfies NodeReadableStream as ReadableStream, + { + status: 200, + headers: responseHeaders, + } + ) + ); + }, + }); + + function queueNext(event: Payload | typeof CLOSE) { + queue.push(event); + + if (streamController) { + processQueue(); + } + } + + function close() { + queueNext(CLOSE); + + streamController = null; + sentInitialChunk = false; + } + + function enqueue(chunk: Chunks, hasNext: boolean) { + queueNext({ ...chunk, [hasNextSymbol]: hasNext }); + + if (!hasNext) { + close(); + } + } + + return { + httpLink, + enqueue, + close, + }; +} From df5b353058011163a56e6635d99988d3af31e512 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 16:23:43 -0600 Subject: [PATCH 036/254] Rename helper --- src/testing/internal/incremental/utils.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/testing/internal/incremental/utils.ts b/src/testing/internal/incremental/utils.ts index 2d6be2627ac..70b9bae52d2 100644 --- a/src/testing/internal/incremental/utils.ts +++ b/src/testing/internal/incremental/utils.ts @@ -8,7 +8,7 @@ import { HttpLink } from "@apollo/client/link/http"; const hasNextSymbol = Symbol("hasNext"); -export function mockIncrementalStream({ +export function mockMultipartStream({ responseHeaders, }: { responseHeaders: Headers; From f4a4a9f40c5d3a93b2baf766c2e48bbff71c4ad9 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 16:24:59 -0600 Subject: [PATCH 037/254] Use updated helper in incremental utils --- src/testing/internal/incremental.ts | 107 +--------------------------- 1 file changed, 3 insertions(+), 104 deletions(-) diff --git a/src/testing/internal/incremental.ts b/src/testing/internal/incremental.ts index a457b2189ff..8e88299028c 100644 --- a/src/testing/internal/incremental.ts +++ b/src/testing/internal/incremental.ts @@ -1,9 +1,3 @@ -import { - ReadableStream as NodeReadableStream, - TextEncoderStream, - TransformStream, -} from "node:stream/web"; - import type { FormattedInitialIncrementalExecutionResult, FormattedSubsequentIncrementalExecutionResult, @@ -11,109 +5,14 @@ import type { } from "graphql-17-alpha2"; import type { ApolloPayloadResult } from "@apollo/client"; -import { HttpLink } from 
"@apollo/client/link/http"; - -const hasNextSymbol = Symbol("hasNext"); - -function mockIncrementalStream({ - responseHeaders, -}: { - responseHeaders: Headers; -}) { - type Payload = Chunks & { [hasNextSymbol]: boolean }; - const CLOSE = Symbol(); - let streamController: ReadableStreamDefaultController | null = null; - let sentInitialChunk = false; - - const queue: Array = []; - - function processQueue() { - if (!streamController) { - throw new Error("Cannot process queue without stream controller"); - } - - let chunk; - while ((chunk = queue.shift())) { - if (chunk === CLOSE) { - streamController.close(); - } else { - streamController.enqueue(chunk); - } - } - } - - function createStream() { - return new NodeReadableStream({ - start(c) { - streamController = c; - processQueue(); - }, - }) - .pipeThrough( - new TransformStream({ - transform: (chunk, controller) => { - controller.enqueue( - (!sentInitialChunk ? "\r\n---\r\n" : "") + - "content-type: application/json; charset=utf-8\r\n\r\n" + - JSON.stringify(chunk) + - (chunk[hasNextSymbol] ? "\r\n---\r\n" : "\r\n-----\r\n") - ); - sentInitialChunk = true; - }, - }) - ) - .pipeThrough(new TextEncoderStream()); - } - - const httpLink = new HttpLink({ - fetch(input, init) { - return Promise.resolve( - new Response( - createStream() satisfies NodeReadableStream as ReadableStream, - { - status: 200, - headers: responseHeaders, - } - ) - ); - }, - }); - function queueNext(event: Payload | typeof CLOSE) { - queue.push(event); - - if (streamController) { - processQueue(); - } - } - - function close() { - queueNext(CLOSE); - - streamController = null; - sentInitialChunk = false; - } - - function enqueue(chunk: Chunks, hasNext: boolean) { - queueNext({ ...chunk, [hasNextSymbol]: hasNext }); - - if (!hasNext) { - close(); - } - } - - return { - httpLink, - enqueue, - close, - }; -} +import { mockMultipartStream } from "./incremental/utils.js"; export function mockDeferStream< TData = Record, TExtensions = Record, >() { - const { httpLink, enqueue } = mockIncrementalStream< + const { httpLink, enqueue } = mockMultipartStream< | FormattedInitialIncrementalExecutionResult | FormattedSubsequentIncrementalExecutionResult >({ @@ -153,7 +52,7 @@ export function mockMultipartSubscriptionStream< TData = Record, TExtensions = Record, >() { - const { httpLink, enqueue } = mockIncrementalStream< + const { httpLink, enqueue } = mockMultipartStream< ApolloPayloadResult >({ responseHeaders: new Headers({ From ba1f15e1dc082dd45b844143e2445583059d3bb3 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 16:25:48 -0600 Subject: [PATCH 038/254] Move mockDeferStream to own file --- src/testing/internal/incremental.ts | 48 ++----------------- .../internal/incremental/defer20220824.ts | 47 ++++++++++++++++++ 2 files changed, 50 insertions(+), 45 deletions(-) create mode 100644 src/testing/internal/incremental/defer20220824.ts diff --git a/src/testing/internal/incremental.ts b/src/testing/internal/incremental.ts index 8e88299028c..d2303c2ab3b 100644 --- a/src/testing/internal/incremental.ts +++ b/src/testing/internal/incremental.ts @@ -1,52 +1,10 @@ -import type { - FormattedInitialIncrementalExecutionResult, - FormattedSubsequentIncrementalExecutionResult, - GraphQLFormattedError, -} from "graphql-17-alpha2"; - import type { ApolloPayloadResult } from "@apollo/client"; +import { mockDefer20220824 } from "./incremental/defer20220824.js"; import { mockMultipartStream } from "./incremental/utils.js"; -export function mockDeferStream< - TData = Record, - 
TExtensions = Record, ->() { - const { httpLink, enqueue } = mockMultipartStream< - | FormattedInitialIncrementalExecutionResult - | FormattedSubsequentIncrementalExecutionResult - >({ - responseHeaders: new Headers({ - "Content-Type": 'multipart/mixed; boundary="-"; deferSpec=20220824', - }), - }); - return { - httpLink, - enqueueInitialChunk( - chunk: FormattedInitialIncrementalExecutionResult - ) { - enqueue(chunk, chunk.hasNext); - }, - enqueueSubsequentChunk( - chunk: FormattedSubsequentIncrementalExecutionResult - ) { - enqueue(chunk, chunk.hasNext); - }, - enqueueErrorChunk(errors: GraphQLFormattedError[]) { - enqueue( - { - hasNext: true, - incremental: [ - { - errors, - }, - ], - }, - true - ); - }, - }; -} +// TODO: Update to new name +export { mockDefer20220824 as mockDeferStream }; export function mockMultipartSubscriptionStream< TData = Record, diff --git a/src/testing/internal/incremental/defer20220824.ts b/src/testing/internal/incremental/defer20220824.ts new file mode 100644 index 00000000000..67afe6636d7 --- /dev/null +++ b/src/testing/internal/incremental/defer20220824.ts @@ -0,0 +1,47 @@ +import type { + FormattedInitialIncrementalExecutionResult, + FormattedSubsequentIncrementalExecutionResult, + GraphQLFormattedError, +} from "graphql-17-alpha2"; + +import { mockMultipartStream } from "./utils.js"; + +export function mockDefer20220824< + TData = Record, + TExtensions = Record, +>() { + const { httpLink, enqueue } = mockMultipartStream< + | FormattedInitialIncrementalExecutionResult + | FormattedSubsequentIncrementalExecutionResult + >({ + responseHeaders: new Headers({ + "Content-Type": 'multipart/mixed; boundary="-"; deferSpec=20220824', + }), + }); + return { + httpLink, + enqueueInitialChunk( + chunk: FormattedInitialIncrementalExecutionResult + ) { + enqueue(chunk, chunk.hasNext); + }, + enqueueSubsequentChunk( + chunk: FormattedSubsequentIncrementalExecutionResult + ) { + enqueue(chunk, chunk.hasNext); + }, + enqueueErrorChunk(errors: GraphQLFormattedError[]) { + enqueue( + { + hasNext: true, + incremental: [ + { + errors, + }, + ], + }, + true + ); + }, + }; +} From 12e8ddbb4a1da25dda7b03b7c34d42847658bd2d Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 16:28:04 -0600 Subject: [PATCH 039/254] Rename mockDeferStream to mockDefer20220824 everywhere --- src/__tests__/__snapshots__/exports.ts.snap | 2 +- src/__tests__/fetchMore.ts | 4 ++-- src/core/__tests__/ApolloClient/general.test.ts | 4 ++-- src/incremental/handlers/__tests__/defer20220824.test.ts | 6 +++--- .../handlers/__tests__/graphql17Alpha9/defer.test.ts | 6 +++--- src/link/error/__tests__/index.ts | 4 ++-- src/testing/internal/incremental.ts | 4 +--- src/testing/internal/index.ts | 2 +- 8 files changed, 15 insertions(+), 17 deletions(-) diff --git a/src/__tests__/__snapshots__/exports.ts.snap b/src/__tests__/__snapshots__/exports.ts.snap index 6505b8a0721..0e3fd3552c3 100644 --- a/src/__tests__/__snapshots__/exports.ts.snap +++ b/src/__tests__/__snapshots__/exports.ts.snap @@ -363,7 +363,7 @@ Array [ "enableFakeTimers", "executeWithDefaultContext", "markAsStreaming", - "mockDeferStream", + "mockDefer20220824", "mockMultipartSubscriptionStream", "renderAsync", "renderHookAsync", diff --git a/src/__tests__/fetchMore.ts b/src/__tests__/fetchMore.ts index 73fdf2af688..7c863397dc1 100644 --- a/src/__tests__/fetchMore.ts +++ b/src/__tests__/fetchMore.ts @@ -19,7 +19,7 @@ import { Defer20220824Handler } from "@apollo/client/incremental"; import { MockLink, MockSubscriptionLink } from 
"@apollo/client/testing"; import { markAsStreaming, - mockDeferStream, + mockDefer20220824, ObservableStream, setupPaginatedCase, } from "@apollo/client/testing/internal"; @@ -2478,7 +2478,7 @@ test("uses updateQuery to update the result of the query with no-cache queries", }); test("calling `fetchMore` on an ObservableQuery that hasn't finished deferring yet will not put it into completed state", async () => { - const defer = mockDeferStream(); + const defer = mockDefer20220824(); const baseLink = new MockSubscriptionLink(); const client = new ApolloClient({ diff --git a/src/core/__tests__/ApolloClient/general.test.ts b/src/core/__tests__/ApolloClient/general.test.ts index eccc8b5245d..49ecefaa437 100644 --- a/src/core/__tests__/ApolloClient/general.test.ts +++ b/src/core/__tests__/ApolloClient/general.test.ts @@ -14,7 +14,7 @@ import { ApolloLink } from "@apollo/client/link"; import { ClientAwarenessLink } from "@apollo/client/link/client-awareness"; import { MockLink } from "@apollo/client/testing"; import { - mockDeferStream, + mockDefer20220824, ObservableStream, spyOnConsole, wait, @@ -7567,7 +7567,7 @@ describe("ApolloClient", () => { const outgoingRequestSpy = jest.fn(((operation, forward) => forward(operation)) satisfies ApolloLink.RequestHandler); - const defer = mockDeferStream(); + const defer = mockDefer20220824(); const client = new ApolloClient({ cache: new InMemoryCache({}), link: new ApolloLink(outgoingRequestSpy).concat(defer.httpLink), diff --git a/src/incremental/handlers/__tests__/defer20220824.test.ts b/src/incremental/handlers/__tests__/defer20220824.test.ts index f5795710d6b..e412199e2a6 100644 --- a/src/incremental/handlers/__tests__/defer20220824.test.ts +++ b/src/incremental/handlers/__tests__/defer20220824.test.ts @@ -28,7 +28,7 @@ import { import { Defer20220824Handler } from "@apollo/client/incremental"; import { markAsStreaming, - mockDeferStream, + mockDefer20220824, ObservableStream, } from "@apollo/client/testing/internal"; @@ -683,7 +683,7 @@ test("Defer20220824Handler can be used with `ApolloClient`", async () => { }); test("merges cache updates that happen concurrently", async () => { - const stream = mockDeferStream(); + const stream = mockDefer20220824(); const client = new ApolloClient({ link: stream.httpLink, cache: new InMemoryCache(), @@ -979,7 +979,7 @@ test("stream that returns an error but continues to stream", async () => { }); test("handles final chunk of { hasNext: false } correctly in usage with Apollo Client", async () => { - const stream = mockDeferStream(); + const stream = mockDefer20220824(); const client = new ApolloClient({ link: stream.httpLink, cache: new InMemoryCache(), diff --git a/src/incremental/handlers/__tests__/graphql17Alpha9/defer.test.ts b/src/incremental/handlers/__tests__/graphql17Alpha9/defer.test.ts index 888ace8cea2..fdf145ed68e 100644 --- a/src/incremental/handlers/__tests__/graphql17Alpha9/defer.test.ts +++ b/src/incremental/handlers/__tests__/graphql17Alpha9/defer.test.ts @@ -27,7 +27,7 @@ import { } from "@apollo/client"; import { markAsStreaming, - mockDeferStream, + mockDefer20220824, ObservableStream, wait, } from "@apollo/client/testing/internal"; @@ -2371,7 +2371,7 @@ test("GraphQL17Alpha9Handler can be used with `ApolloClient`", async () => { // TODO: Add test helpers for new format test.failing("merges cache updates that happen concurrently", async () => { - const stream = mockDeferStream(); + const stream = mockDefer20220824(); const client = new ApolloClient({ link: stream.httpLink, cache: new 
InMemoryCache(), @@ -2642,7 +2642,7 @@ test("stream that returns an error but continues to stream", async () => { // TODO: Update to use test utils with updated types test.skip("handles final chunk of { hasNext: false } correctly in usage with Apollo Client", async () => { - const stream = mockDeferStream(); + const stream = mockDefer20220824(); const client = new ApolloClient({ link: stream.httpLink, cache: new InMemoryCache(), diff --git a/src/link/error/__tests__/index.ts b/src/link/error/__tests__/index.ts index 50e814e811e..92928c77746 100644 --- a/src/link/error/__tests__/index.ts +++ b/src/link/error/__tests__/index.ts @@ -13,7 +13,7 @@ import { ApolloLink } from "@apollo/client/link"; import { ErrorLink } from "@apollo/client/link/error"; import { executeWithDefaultContext as execute, - mockDeferStream, + mockDefer20220824, mockMultipartSubscriptionStream, ObservableStream, wait, @@ -214,7 +214,7 @@ describe("error handling", () => { const errorLink = new ErrorLink(callback); const { httpLink, enqueueInitialChunk, enqueueErrorChunk } = - mockDeferStream(); + mockDefer20220824(); const link = errorLink.concat(httpLink); const stream = new ObservableStream(execute(link, { query })); diff --git a/src/testing/internal/incremental.ts b/src/testing/internal/incremental.ts index d2303c2ab3b..6d6cc2c45a7 100644 --- a/src/testing/internal/incremental.ts +++ b/src/testing/internal/incremental.ts @@ -1,10 +1,8 @@ import type { ApolloPayloadResult } from "@apollo/client"; -import { mockDefer20220824 } from "./incremental/defer20220824.js"; import { mockMultipartStream } from "./incremental/utils.js"; -// TODO: Update to new name -export { mockDefer20220824 as mockDeferStream }; +export { mockDefer20220824 } from "./incremental/defer20220824.js"; export function mockMultipartSubscriptionStream< TData = Record, diff --git a/src/testing/internal/index.ts b/src/testing/internal/index.ts index 1ebe8234c9c..b7fb5594279 100644 --- a/src/testing/internal/index.ts +++ b/src/testing/internal/index.ts @@ -26,7 +26,7 @@ export { actAsync } from "./rtl/actAsync.js"; export { renderAsync } from "./rtl/renderAsync.js"; export { renderHookAsync } from "./rtl/renderHookAsync.js"; export { - mockDeferStream, + mockDefer20220824, mockMultipartSubscriptionStream, } from "./incremental.js"; export { resetApolloContext } from "./resetApolloContext.js"; From 3ed6eb46068a43b183f523279bfdb4ecba62148f Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 16:29:45 -0600 Subject: [PATCH 040/254] Rename file --- src/testing/internal/incremental.ts | 2 +- .../incremental/{defer20220824.ts => mockDefer20220824.ts} | 0 2 files changed, 1 insertion(+), 1 deletion(-) rename src/testing/internal/incremental/{defer20220824.ts => mockDefer20220824.ts} (100%) diff --git a/src/testing/internal/incremental.ts b/src/testing/internal/incremental.ts index 6d6cc2c45a7..0ce2389b5ae 100644 --- a/src/testing/internal/incremental.ts +++ b/src/testing/internal/incremental.ts @@ -2,7 +2,7 @@ import type { ApolloPayloadResult } from "@apollo/client"; import { mockMultipartStream } from "./incremental/utils.js"; -export { mockDefer20220824 } from "./incremental/defer20220824.js"; +export { mockDefer20220824 } from "./incremental/mockDefer20220824.js"; export function mockMultipartSubscriptionStream< TData = Record, diff --git a/src/testing/internal/incremental/defer20220824.ts b/src/testing/internal/incremental/mockDefer20220824.ts similarity index 100% rename from src/testing/internal/incremental/defer20220824.ts rename to 
src/testing/internal/incremental/mockDefer20220824.ts From b35c6e06087551bf1d691b3eb0f5e77fe391cf09 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 16:33:41 -0600 Subject: [PATCH 041/254] Add helper to mock newer defer implementation --- src/testing/internal/incremental.ts | 1 + .../mockDeferStreamGraphql17Alpha9.ts | 33 +++++++++++++++++++ src/testing/internal/index.ts | 1 + 3 files changed, 35 insertions(+) create mode 100644 src/testing/internal/incremental/mockDeferStreamGraphql17Alpha9.ts diff --git a/src/testing/internal/incremental.ts b/src/testing/internal/incremental.ts index 0ce2389b5ae..e921f060ff3 100644 --- a/src/testing/internal/incremental.ts +++ b/src/testing/internal/incremental.ts @@ -3,6 +3,7 @@ import type { ApolloPayloadResult } from "@apollo/client"; import { mockMultipartStream } from "./incremental/utils.js"; export { mockDefer20220824 } from "./incremental/mockDefer20220824.js"; +export { mockDeferStreamGraphQL17Alpha9 } from "./incremental/mockDeferStreamGraphql17Alpha9.js"; export function mockMultipartSubscriptionStream< TData = Record, diff --git a/src/testing/internal/incremental/mockDeferStreamGraphql17Alpha9.ts b/src/testing/internal/incremental/mockDeferStreamGraphql17Alpha9.ts new file mode 100644 index 00000000000..9532b1b57eb --- /dev/null +++ b/src/testing/internal/incremental/mockDeferStreamGraphql17Alpha9.ts @@ -0,0 +1,33 @@ +import type { + FormattedInitialIncrementalExecutionResult, + FormattedSubsequentIncrementalExecutionResult, +} from "graphql-17-alpha9"; + +import { mockMultipartStream } from "./utils.js"; + +export function mockDeferStreamGraphQL17Alpha9< + TData = Record, + TExtensions = Record, +>() { + const { httpLink, enqueue } = mockMultipartStream< + | FormattedInitialIncrementalExecutionResult + | FormattedSubsequentIncrementalExecutionResult + >({ + responseHeaders: new Headers({ + "Content-Type": 'multipart/mixed; boundary="-"', + }), + }); + return { + httpLink, + enqueueInitialChunk( + chunk: FormattedInitialIncrementalExecutionResult + ) { + enqueue(chunk, chunk.hasNext); + }, + enqueueSubsequentChunk( + chunk: FormattedSubsequentIncrementalExecutionResult + ) { + enqueue(chunk, chunk.hasNext); + }, + }; +} diff --git a/src/testing/internal/index.ts b/src/testing/internal/index.ts index b7fb5594279..9d2905875e0 100644 --- a/src/testing/internal/index.ts +++ b/src/testing/internal/index.ts @@ -27,6 +27,7 @@ export { renderAsync } from "./rtl/renderAsync.js"; export { renderHookAsync } from "./rtl/renderHookAsync.js"; export { mockDefer20220824, + mockDeferStreamGraphQL17Alpha9, mockMultipartSubscriptionStream, } from "./incremental.js"; export { resetApolloContext } from "./resetApolloContext.js"; From 260d2962383d7d3a0adcd44e2f0c0b45debbe6e5 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 16:35:28 -0600 Subject: [PATCH 042/254] Rename folder to multipart --- .../mockDefer20220824.ts | 0 .../mockDeferStreamGraphql17Alpha9.ts | 0 .../mockMultipartSubscriptionStream.ts | 36 +++++++++++++++++++ .../{incremental => multipart}/utils.ts | 0 4 files changed, 36 insertions(+) rename src/testing/internal/{incremental => multipart}/mockDefer20220824.ts (100%) rename src/testing/internal/{incremental => multipart}/mockDeferStreamGraphql17Alpha9.ts (100%) create mode 100644 src/testing/internal/multipart/mockMultipartSubscriptionStream.ts rename src/testing/internal/{incremental => multipart}/utils.ts (100%) diff --git a/src/testing/internal/incremental/mockDefer20220824.ts 
b/src/testing/internal/multipart/mockDefer20220824.ts similarity index 100% rename from src/testing/internal/incremental/mockDefer20220824.ts rename to src/testing/internal/multipart/mockDefer20220824.ts diff --git a/src/testing/internal/incremental/mockDeferStreamGraphql17Alpha9.ts b/src/testing/internal/multipart/mockDeferStreamGraphql17Alpha9.ts similarity index 100% rename from src/testing/internal/incremental/mockDeferStreamGraphql17Alpha9.ts rename to src/testing/internal/multipart/mockDeferStreamGraphql17Alpha9.ts diff --git a/src/testing/internal/multipart/mockMultipartSubscriptionStream.ts b/src/testing/internal/multipart/mockMultipartSubscriptionStream.ts new file mode 100644 index 00000000000..73e29c1a9cc --- /dev/null +++ b/src/testing/internal/multipart/mockMultipartSubscriptionStream.ts @@ -0,0 +1,36 @@ +import type { ApolloPayloadResult } from "@apollo/client"; + +import { mockMultipartStream } from "./utils.js"; + +export function mockMultipartSubscriptionStream< + TData = Record, + TExtensions = Record, +>() { + const { httpLink, enqueue } = mockMultipartStream< + ApolloPayloadResult + >({ + responseHeaders: new Headers({ + "Content-Type": "multipart/mixed", + }), + }); + + enqueueHeartbeat(); + + function enqueueHeartbeat() { + enqueue({} as any, true); + } + + return { + httpLink, + enqueueHeartbeat, + enqueuePayloadResult( + payload: ApolloPayloadResult["payload"], + hasNext = true + ) { + enqueue({ payload }, hasNext); + }, + enqueueProtocolErrors(errors: ApolloPayloadResult["errors"]) { + enqueue({ payload: null, errors }, false); + }, + }; +} diff --git a/src/testing/internal/incremental/utils.ts b/src/testing/internal/multipart/utils.ts similarity index 100% rename from src/testing/internal/incremental/utils.ts rename to src/testing/internal/multipart/utils.ts From dc0d1fdc86f86572d30ad5149cff49487db65443 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 16:35:37 -0600 Subject: [PATCH 043/254] Move mulipart subscription mock to own file --- src/testing/internal/incremental.ts | 42 +++-------------------------- 1 file changed, 3 insertions(+), 39 deletions(-) diff --git a/src/testing/internal/incremental.ts b/src/testing/internal/incremental.ts index e921f060ff3..d5371143ab2 100644 --- a/src/testing/internal/incremental.ts +++ b/src/testing/internal/incremental.ts @@ -1,39 +1,3 @@ -import type { ApolloPayloadResult } from "@apollo/client"; - -import { mockMultipartStream } from "./incremental/utils.js"; - -export { mockDefer20220824 } from "./incremental/mockDefer20220824.js"; -export { mockDeferStreamGraphQL17Alpha9 } from "./incremental/mockDeferStreamGraphql17Alpha9.js"; - -export function mockMultipartSubscriptionStream< - TData = Record, - TExtensions = Record, ->() { - const { httpLink, enqueue } = mockMultipartStream< - ApolloPayloadResult - >({ - responseHeaders: new Headers({ - "Content-Type": "multipart/mixed", - }), - }); - - enqueueHeartbeat(); - - function enqueueHeartbeat() { - enqueue({} as any, true); - } - - return { - httpLink, - enqueueHeartbeat, - enqueuePayloadResult( - payload: ApolloPayloadResult["payload"], - hasNext = true - ) { - enqueue({ payload }, hasNext); - }, - enqueueProtocolErrors(errors: ApolloPayloadResult["errors"]) { - enqueue({ payload: null, errors }, false); - }, - }; -} +export { mockDefer20220824 } from "./multipart/mockDefer20220824.js"; +export { mockDeferStreamGraphQL17Alpha9 } from "./multipart/mockDeferStreamGraphql17Alpha9.js"; +export { mockMultipartSubscriptionStream } from 
"./multipart/mockMultipartSubscriptionStream.js"; From 7b51170eb9b1d41649c974947ed81a951120573f Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 16:36:33 -0600 Subject: [PATCH 044/254] Import directly to avoid another barrel file --- src/testing/internal/incremental.ts | 3 --- src/testing/internal/index.ts | 8 +++----- 2 files changed, 3 insertions(+), 8 deletions(-) delete mode 100644 src/testing/internal/incremental.ts diff --git a/src/testing/internal/incremental.ts b/src/testing/internal/incremental.ts deleted file mode 100644 index d5371143ab2..00000000000 --- a/src/testing/internal/incremental.ts +++ /dev/null @@ -1,3 +0,0 @@ -export { mockDefer20220824 } from "./multipart/mockDefer20220824.js"; -export { mockDeferStreamGraphQL17Alpha9 } from "./multipart/mockDeferStreamGraphql17Alpha9.js"; -export { mockMultipartSubscriptionStream } from "./multipart/mockMultipartSubscriptionStream.js"; diff --git a/src/testing/internal/index.ts b/src/testing/internal/index.ts index 9d2905875e0..37fad789108 100644 --- a/src/testing/internal/index.ts +++ b/src/testing/internal/index.ts @@ -25,11 +25,9 @@ export { createClientWrapper, createMockWrapper } from "./renderHelpers.js"; export { actAsync } from "./rtl/actAsync.js"; export { renderAsync } from "./rtl/renderAsync.js"; export { renderHookAsync } from "./rtl/renderHookAsync.js"; -export { - mockDefer20220824, - mockDeferStreamGraphQL17Alpha9, - mockMultipartSubscriptionStream, -} from "./incremental.js"; +export { mockDefer20220824 } from "./multipart/mockDefer20220824.js"; +export { mockDeferStreamGraphQL17Alpha9 } from "./multipart/mockDeferStreamGraphql17Alpha9.js"; +export { mockMultipartSubscriptionStream } from "./multipart/mockMultipartSubscriptionStream.js"; export { resetApolloContext } from "./resetApolloContext.js"; export { createOperationWithDefaultContext, From 005aa272432304354a73cbf17c396ab6e5405657 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 16:49:39 -0600 Subject: [PATCH 045/254] Format test files with typescript parser --- .prettierrc | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.prettierrc b/.prettierrc index 8a0e9b37b39..5e21b9169ee 100644 --- a/.prettierrc +++ b/.prettierrc @@ -17,6 +17,12 @@ "parser": "typescript-with-jsdoc" } }, + { + "files": ["**/__tests__/**/*.ts", "**/__tests__/**/*.tsx"], + "options": { + "parser": "typescript" + } + }, { "files": ["*.mdx"], "options": { From 4e36d7ce411d7ac3261ca5275015944a0365651b Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 16:49:59 -0600 Subject: [PATCH 046/254] Update exports snapshot --- src/__tests__/__snapshots__/exports.ts.snap | 1 + 1 file changed, 1 insertion(+) diff --git a/src/__tests__/__snapshots__/exports.ts.snap b/src/__tests__/__snapshots__/exports.ts.snap index 0e3fd3552c3..9207e712cdd 100644 --- a/src/__tests__/__snapshots__/exports.ts.snap +++ b/src/__tests__/__snapshots__/exports.ts.snap @@ -364,6 +364,7 @@ Array [ "executeWithDefaultContext", "markAsStreaming", "mockDefer20220824", + "mockDeferStreamGraphQL17Alpha9", "mockMultipartSubscriptionStream", "renderAsync", "renderHookAsync", From 8b9b3141090636b29e91265a4677b495b79e3ab7 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 16:50:15 -0600 Subject: [PATCH 047/254] Move useQuery defer tests to own file --- src/react/hooks/__tests__/useQuery.test.tsx | 1222 ---------------- .../__tests__/useQuery/defer20220824.test.tsx | 1234 +++++++++++++++++ 2 files changed, 1234 insertions(+), 1222 deletions(-) create mode 100644 
src/react/hooks/__tests__/useQuery/defer20220824.test.tsx diff --git a/src/react/hooks/__tests__/useQuery.test.tsx b/src/react/hooks/__tests__/useQuery.test.tsx index 2d544bb11ac..5845bac6001 100644 --- a/src/react/hooks/__tests__/useQuery.test.tsx +++ b/src/react/hooks/__tests__/useQuery.test.tsx @@ -34,7 +34,6 @@ import { NetworkStatus, } from "@apollo/client"; import { InMemoryCache } from "@apollo/client/cache"; -import { Defer20220824Handler } from "@apollo/client/incremental"; import { ApolloLink } from "@apollo/client/link"; import { LocalState } from "@apollo/client/local-state"; import type { Unmasked } from "@apollo/client/masking"; @@ -53,7 +52,6 @@ import type { } from "@apollo/client/testing/internal"; import { enableFakeTimers, - markAsStreaming, setupPaginatedCase, setupSimpleCase, setupVariablesCase, @@ -10191,1226 +10189,6 @@ describe("useQuery Hook", () => { }); }); - describe("defer", () => { - it("should handle deferred queries", async () => { - const query = gql` - { - greeting { - message - ... on Greeting @defer { - recipient { - name - } - } - } - } - `; - - const link = new MockSubscriptionLink(); - - const client = new ApolloClient({ - link, - cache: new InMemoryCache(), - incrementalHandler: new Defer20220824Handler(), - }); - - using _disabledAct = disableActEnvironment(); - const { takeSnapshot } = await renderHookToSnapshotStream( - () => useQuery(query), - { - wrapper: ({ children }) => ( - {children} - ), - } - ); - - await expect(takeSnapshot()).resolves.toStrictEqualTyped({ - data: undefined, - dataState: "empty", - loading: true, - networkStatus: NetworkStatus.loading, - previousData: undefined, - variables: {}, - }); - - setTimeout(() => { - link.simulateResult({ - result: { - data: { - greeting: { - message: "Hello world", - __typename: "Greeting", - }, - }, - hasNext: true, - }, - }); - }); - - await expect(takeSnapshot()).resolves.toStrictEqualTyped({ - data: markAsStreaming({ - greeting: { - message: "Hello world", - __typename: "Greeting", - }, - }), - dataState: "streaming", - loading: true, - networkStatus: NetworkStatus.streaming, - previousData: undefined, - variables: {}, - }); - - setTimeout(() => { - link.simulateResult( - { - result: { - incremental: [ - { - data: { - recipient: { - name: "Alice", - __typename: "Person", - }, - __typename: "Greeting", - }, - path: ["greeting"], - }, - ], - hasNext: false, - }, - }, - true - ); - }); - - await expect(takeSnapshot()).resolves.toStrictEqualTyped({ - data: { - greeting: { - message: "Hello world", - __typename: "Greeting", - recipient: { - name: "Alice", - __typename: "Person", - }, - }, - }, - dataState: "complete", - loading: false, - networkStatus: NetworkStatus.ready, - previousData: { - greeting: { - message: "Hello world", - __typename: "Greeting", - }, - }, - variables: {}, - }); - - await expect(takeSnapshot).not.toRerender(); - }); - - it("should handle deferred queries in lists", async () => { - const query = gql` - { - greetings { - message - ... 
on Greeting @defer { - recipient { - name - } - } - } - } - `; - - const link = new MockSubscriptionLink(); - - const client = new ApolloClient({ - link, - cache: new InMemoryCache(), - incrementalHandler: new Defer20220824Handler(), - }); - - using _disabledAct = disableActEnvironment(); - const { takeSnapshot } = await renderHookToSnapshotStream( - () => useQuery(query), - { - wrapper: ({ children }) => ( - {children} - ), - } - ); - - await expect(takeSnapshot()).resolves.toStrictEqualTyped({ - data: undefined, - dataState: "empty", - loading: true, - networkStatus: NetworkStatus.loading, - previousData: undefined, - variables: {}, - }); - - setTimeout(() => { - link.simulateResult({ - result: { - data: { - greetings: [ - { message: "Hello world", __typename: "Greeting" }, - { message: "Hello again", __typename: "Greeting" }, - ], - }, - hasNext: true, - }, - }); - }); - - await expect(takeSnapshot()).resolves.toStrictEqualTyped({ - data: markAsStreaming({ - greetings: [ - { message: "Hello world", __typename: "Greeting" }, - { message: "Hello again", __typename: "Greeting" }, - ], - }), - dataState: "streaming", - loading: true, - networkStatus: NetworkStatus.streaming, - previousData: undefined, - variables: {}, - }); - - setTimeout(() => { - link.simulateResult({ - result: { - incremental: [ - { - data: { - recipient: { - name: "Alice", - __typename: "Person", - }, - __typename: "Greeting", - }, - path: ["greetings", 0], - }, - ], - hasNext: true, - }, - }); - }); - - await expect(takeSnapshot()).resolves.toStrictEqualTyped({ - data: markAsStreaming({ - greetings: [ - { - message: "Hello world", - __typename: "Greeting", - recipient: { name: "Alice", __typename: "Person" }, - }, - { message: "Hello again", __typename: "Greeting" }, - ], - }), - dataState: "streaming", - loading: true, - networkStatus: NetworkStatus.streaming, - previousData: { - greetings: [ - { message: "Hello world", __typename: "Greeting" }, - { message: "Hello again", __typename: "Greeting" }, - ], - }, - variables: {}, - }); - - setTimeout(() => { - link.simulateResult( - { - result: { - incremental: [ - { - data: { - recipient: { - name: "Bob", - __typename: "Person", - }, - __typename: "Greeting", - }, - path: ["greetings", 1], - }, - ], - hasNext: false, - }, - }, - true - ); - }); - - await expect(takeSnapshot()).resolves.toStrictEqualTyped({ - data: { - greetings: [ - { - message: "Hello world", - __typename: "Greeting", - recipient: { name: "Alice", __typename: "Person" }, - }, - { - message: "Hello again", - __typename: "Greeting", - recipient: { name: "Bob", __typename: "Person" }, - }, - ], - }, - dataState: "complete", - loading: false, - networkStatus: NetworkStatus.ready, - previousData: { - greetings: [ - { - message: "Hello world", - __typename: "Greeting", - recipient: { name: "Alice", __typename: "Person" }, - }, - { message: "Hello again", __typename: "Greeting" }, - ], - }, - variables: {}, - }); - - await expect(takeSnapshot).not.toRerender(); - }); - - it("should handle deferred queries in lists, merging arrays", async () => { - const query = gql` - query DeferVariation { - allProducts { - delivery { - ...MyFragment @defer - } - sku - id - } - } - fragment MyFragment on DeliveryEstimates { - estimatedDelivery - fastestDelivery - } - `; - - const link = new MockSubscriptionLink(); - - const client = new ApolloClient({ - link, - cache: new InMemoryCache(), - incrementalHandler: new Defer20220824Handler(), - }); - - using _disabledAct = disableActEnvironment(); - const { takeSnapshot } = await 
renderHookToSnapshotStream( - () => useQuery(query), - { - wrapper: ({ children }) => ( - {children} - ), - } - ); - - await expect(takeSnapshot()).resolves.toStrictEqualTyped({ - data: undefined, - dataState: "empty", - loading: true, - networkStatus: NetworkStatus.loading, - previousData: undefined, - variables: {}, - }); - - setTimeout(() => { - link.simulateResult({ - result: { - data: { - allProducts: [ - { - __typename: "Product", - delivery: { - __typename: "DeliveryEstimates", - }, - id: "apollo-federation", - sku: "federation", - }, - { - __typename: "Product", - delivery: { - __typename: "DeliveryEstimates", - }, - id: "apollo-studio", - sku: "studio", - }, - ], - }, - hasNext: true, - }, - }); - }); - - await expect(takeSnapshot()).resolves.toStrictEqualTyped({ - data: markAsStreaming({ - allProducts: [ - { - __typename: "Product", - delivery: { - __typename: "DeliveryEstimates", - }, - id: "apollo-federation", - sku: "federation", - }, - { - __typename: "Product", - delivery: { - __typename: "DeliveryEstimates", - }, - id: "apollo-studio", - sku: "studio", - }, - ], - }), - dataState: "streaming", - loading: true, - networkStatus: NetworkStatus.streaming, - previousData: undefined, - variables: {}, - }); - - setTimeout(() => { - link.simulateResult({ - result: { - hasNext: true, - incremental: [ - { - data: { - __typename: "DeliveryEstimates", - estimatedDelivery: "6/25/2021", - fastestDelivery: "6/24/2021", - }, - path: ["allProducts", 0, "delivery"], - }, - { - data: { - __typename: "DeliveryEstimates", - estimatedDelivery: "6/25/2021", - fastestDelivery: "6/24/2021", - }, - path: ["allProducts", 1, "delivery"], - }, - ], - }, - }); - }); - - await expect(takeSnapshot()).resolves.toStrictEqualTyped({ - data: markAsStreaming({ - allProducts: [ - { - __typename: "Product", - delivery: { - __typename: "DeliveryEstimates", - estimatedDelivery: "6/25/2021", - fastestDelivery: "6/24/2021", - }, - id: "apollo-federation", - sku: "federation", - }, - { - __typename: "Product", - delivery: { - __typename: "DeliveryEstimates", - estimatedDelivery: "6/25/2021", - fastestDelivery: "6/24/2021", - }, - id: "apollo-studio", - sku: "studio", - }, - ], - }), - dataState: "streaming", - loading: true, - networkStatus: NetworkStatus.streaming, - previousData: { - allProducts: [ - { - __typename: "Product", - delivery: { - __typename: "DeliveryEstimates", - }, - id: "apollo-federation", - sku: "federation", - }, - { - __typename: "Product", - delivery: { - __typename: "DeliveryEstimates", - }, - id: "apollo-studio", - sku: "studio", - }, - ], - }, - variables: {}, - }); - }); - - it("should handle deferred queries with fetch policy no-cache", async () => { - const query = gql` - { - greeting { - message - ... 
on Greeting @defer { - recipient { - name - } - } - } - } - `; - - const link = new MockSubscriptionLink(); - - const client = new ApolloClient({ - link, - cache: new InMemoryCache(), - incrementalHandler: new Defer20220824Handler(), - }); - - using _disabledAct = disableActEnvironment(); - const { takeSnapshot } = await renderHookToSnapshotStream( - () => useQuery(query, { fetchPolicy: "no-cache" }), - { - wrapper: ({ children }) => ( - {children} - ), - } - ); - - await expect(takeSnapshot()).resolves.toStrictEqualTyped({ - data: undefined, - dataState: "empty", - loading: true, - networkStatus: NetworkStatus.loading, - previousData: undefined, - variables: {}, - }); - - setTimeout(() => { - link.simulateResult({ - result: { - data: { - greeting: { - message: "Hello world", - __typename: "Greeting", - }, - }, - hasNext: true, - }, - }); - }); - - await expect(takeSnapshot()).resolves.toStrictEqualTyped({ - data: markAsStreaming({ - greeting: { - message: "Hello world", - __typename: "Greeting", - }, - }), - dataState: "streaming", - loading: true, - networkStatus: NetworkStatus.streaming, - previousData: undefined, - variables: {}, - }); - - setTimeout(() => { - link.simulateResult( - { - result: { - incremental: [ - { - data: { - recipient: { - name: "Alice", - __typename: "Person", - }, - __typename: "Greeting", - }, - path: ["greeting"], - }, - ], - hasNext: false, - }, - }, - true - ); - }); - - await expect(takeSnapshot()).resolves.toStrictEqualTyped({ - data: { - greeting: { - message: "Hello world", - __typename: "Greeting", - recipient: { - name: "Alice", - __typename: "Person", - }, - }, - }, - dataState: "complete", - loading: false, - networkStatus: NetworkStatus.ready, - previousData: { - greeting: { - message: "Hello world", - __typename: "Greeting", - }, - }, - variables: {}, - }); - - await expect(takeSnapshot).not.toRerender(); - }); - - it("should handle deferred queries with errors returned on the incremental batched result", async () => { - const query = gql` - query { - hero { - name - heroFriends { - id - name - ... 
@defer { - homeWorld - } - } - } - } - `; - - const link = new MockSubscriptionLink(); - - const client = new ApolloClient({ - link, - cache: new InMemoryCache(), - incrementalHandler: new Defer20220824Handler(), - }); - - using _disabledAct = disableActEnvironment(); - const { takeSnapshot } = await renderHookToSnapshotStream( - () => useQuery(query), - { - wrapper: ({ children }) => ( - {children} - ), - } - ); - - await expect(takeSnapshot()).resolves.toStrictEqualTyped({ - data: undefined, - dataState: "empty", - loading: true, - networkStatus: NetworkStatus.loading, - previousData: undefined, - variables: {}, - }); - - setTimeout(() => { - link.simulateResult({ - result: { - data: { - hero: { - name: "R2-D2", - heroFriends: [ - { - id: "1000", - name: "Luke Skywalker", - }, - { - id: "1003", - name: "Leia Organa", - }, - ], - }, - }, - hasNext: true, - }, - }); - }); - - await expect(takeSnapshot()).resolves.toStrictEqualTyped({ - data: markAsStreaming({ - hero: { - heroFriends: [ - { - id: "1000", - name: "Luke Skywalker", - }, - { - id: "1003", - name: "Leia Organa", - }, - ], - name: "R2-D2", - }, - }), - dataState: "streaming", - loading: true, - networkStatus: NetworkStatus.streaming, - previousData: undefined, - variables: {}, - }); - - setTimeout(() => { - link.simulateResult( - { - result: { - incremental: [ - { - path: ["hero", "heroFriends", 0], - errors: [ - { - message: - "homeWorld for character with ID 1000 could not be fetched.", - path: ["hero", "heroFriends", 0, "homeWorld"], - }, - ], - data: { - homeWorld: null, - }, - }, - { - path: ["hero", "heroFriends", 1], - data: { - homeWorld: "Alderaan", - }, - }, - ], - hasNext: false, - }, - }, - true - ); - }); - - await expect(takeSnapshot()).resolves.toStrictEqualTyped({ - data: { - hero: { - heroFriends: [ - { - id: "1000", - name: "Luke Skywalker", - }, - { - id: "1003", - name: "Leia Organa", - }, - ], - name: "R2-D2", - }, - }, - dataState: "complete", - error: new CombinedGraphQLErrors({ - data: { - hero: { - heroFriends: [ - { - id: "1000", - name: "Luke Skywalker", - homeWorld: null, - }, - { - id: "1003", - name: "Leia Organa", - homeWorld: "Alderaan", - }, - ], - name: "R2-D2", - }, - }, - errors: [ - { - message: - "homeWorld for character with ID 1000 could not be fetched.", - path: ["hero", "heroFriends", 0, "homeWorld"], - }, - ], - }), - loading: false, - networkStatus: NetworkStatus.error, - previousData: undefined, - variables: {}, - }); - - await expect(takeSnapshot).not.toRerender(); - }); - - it('should handle deferred queries with errors returned on the incremental batched result and errorPolicy "all"', async () => { - const query = gql` - query { - hero { - name - heroFriends { - id - name - ... 
@defer { - homeWorld - } - } - } - } - `; - - const link = new MockSubscriptionLink(); - - const client = new ApolloClient({ - link, - cache: new InMemoryCache(), - incrementalHandler: new Defer20220824Handler(), - }); - - using _disabledAct = disableActEnvironment(); - const { takeSnapshot } = await renderHookToSnapshotStream( - () => useQuery(query, { errorPolicy: "all" }), - { - wrapper: ({ children }) => ( - {children} - ), - } - ); - - await expect(takeSnapshot()).resolves.toStrictEqualTyped({ - data: undefined, - dataState: "empty", - loading: true, - networkStatus: NetworkStatus.loading, - previousData: undefined, - variables: {}, - }); - - setTimeout(() => { - link.simulateResult({ - result: { - data: { - hero: { - name: "R2-D2", - heroFriends: [ - { - id: "1000", - name: "Luke Skywalker", - }, - { - id: "1003", - name: "Leia Organa", - }, - ], - }, - }, - hasNext: true, - }, - }); - }); - - await expect(takeSnapshot()).resolves.toStrictEqualTyped({ - data: markAsStreaming({ - hero: { - name: "R2-D2", - heroFriends: [ - { - id: "1000", - name: "Luke Skywalker", - }, - { - id: "1003", - name: "Leia Organa", - }, - ], - }, - }), - dataState: "streaming", - loading: true, - networkStatus: NetworkStatus.streaming, - previousData: undefined, - variables: {}, - }); - - setTimeout(() => { - link.simulateResult( - { - result: { - incremental: [ - { - path: ["hero", "heroFriends", 0], - errors: [ - new GraphQLError( - "homeWorld for character with ID 1000 could not be fetched.", - { path: ["hero", "heroFriends", 0, "homeWorld"] } - ), - ], - data: { - homeWorld: null, - }, - extensions: { - thing1: "foo", - thing2: "bar", - }, - }, - { - path: ["hero", "heroFriends", 1], - data: { - homeWorld: "Alderaan", - }, - extensions: { - thing1: "foo", - thing2: "bar", - }, - }, - ], - hasNext: false, - }, - }, - true - ); - }); - - await expect(takeSnapshot()).resolves.toStrictEqualTyped({ - data: { - hero: { - heroFriends: [ - { - // the only difference with the previous test - // is that homeWorld is populated since errorPolicy: all - // populates both partial data and error.graphQLErrors - homeWorld: null, - id: "1000", - name: "Luke Skywalker", - }, - { - // homeWorld is populated due to errorPolicy: all - homeWorld: "Alderaan", - id: "1003", - name: "Leia Organa", - }, - ], - name: "R2-D2", - }, - }, - dataState: "complete", - error: new CombinedGraphQLErrors({ - data: { - hero: { - heroFriends: [ - { homeWorld: null, id: "1000", name: "Luke Skywalker" }, - { homeWorld: "Alderaan", id: "1003", name: "Leia Organa" }, - ], - name: "R2-D2", - }, - }, - errors: [ - { - message: - "homeWorld for character with ID 1000 could not be fetched.", - path: ["hero", "heroFriends", 0, "homeWorld"], - }, - ], - extensions: { - thing1: "foo", - thing2: "bar", - }, - }), - loading: false, - networkStatus: NetworkStatus.error, - previousData: { - hero: { - heroFriends: [ - { - id: "1000", - name: "Luke Skywalker", - }, - { - id: "1003", - name: "Leia Organa", - }, - ], - name: "R2-D2", - }, - }, - variables: {}, - }); - - await expect(takeSnapshot).not.toRerender(); - }); - - it('returns eventually consistent data from deferred queries with data in the cache while using a "cache-and-network" fetch policy', async () => { - const query = gql` - query { - greeting { - message - ... 
on Greeting @defer { - recipient { - name - } - } - } - } - `; - - const link = new MockSubscriptionLink(); - const cache = new InMemoryCache(); - const client = new ApolloClient({ - cache, - link, - incrementalHandler: new Defer20220824Handler(), - }); - - cache.writeQuery({ - query, - data: { - greeting: { - __typename: "Greeting", - message: "Hello cached", - recipient: { __typename: "Person", name: "Cached Alice" }, - }, - }, - }); - - using _disabledAct = disableActEnvironment(); - const { takeSnapshot } = await renderHookToSnapshotStream( - () => useQuery(query, { fetchPolicy: "cache-and-network" }), - { - wrapper: ({ children }) => ( - {children} - ), - } - ); - - await expect(takeSnapshot()).resolves.toStrictEqualTyped({ - data: { - greeting: { - __typename: "Greeting", - message: "Hello cached", - recipient: { __typename: "Person", name: "Cached Alice" }, - }, - }, - dataState: "complete", - loading: true, - networkStatus: NetworkStatus.loading, - previousData: undefined, - variables: {}, - }); - - link.simulateResult({ - result: { - data: { - greeting: { __typename: "Greeting", message: "Hello world" }, - }, - hasNext: true, - }, - }); - - await expect(takeSnapshot()).resolves.toStrictEqualTyped({ - data: markAsStreaming({ - greeting: { - __typename: "Greeting", - message: "Hello world", - recipient: { __typename: "Person", name: "Cached Alice" }, - }, - }), - dataState: "streaming", - loading: true, - networkStatus: NetworkStatus.streaming, - previousData: { - greeting: { - __typename: "Greeting", - message: "Hello cached", - recipient: { __typename: "Person", name: "Cached Alice" }, - }, - }, - variables: {}, - }); - - link.simulateResult( - { - result: { - incremental: [ - { - data: { - recipient: { name: "Alice", __typename: "Person" }, - __typename: "Greeting", - }, - path: ["greeting"], - }, - ], - hasNext: false, - }, - }, - true - ); - - await expect(takeSnapshot()).resolves.toStrictEqualTyped({ - data: { - greeting: { - __typename: "Greeting", - message: "Hello world", - recipient: { __typename: "Person", name: "Alice" }, - }, - }, - dataState: "complete", - loading: false, - networkStatus: NetworkStatus.ready, - previousData: { - greeting: { - __typename: "Greeting", - message: "Hello world", - recipient: { __typename: "Person", name: "Cached Alice" }, - }, - }, - variables: {}, - }); - - await expect(takeSnapshot).not.toRerender(); - }); - - it('returns eventually consistent data from deferred queries with partial data in the cache and using a "cache-first" fetch policy with `returnPartialData`', async () => { - const query = gql` - query { - greeting { - message - ... on Greeting @defer { - recipient { - name - } - } - } - } - `; - - const cache = new InMemoryCache(); - const link = new MockSubscriptionLink(); - const client = new ApolloClient({ - cache, - link, - incrementalHandler: new Defer20220824Handler(), - }); - - // We know we are writing partial data to the cache so suppress the console - // warning. 
- { - using _consoleSpy = spyOnConsole("error"); - cache.writeQuery({ - query, - data: { - greeting: { - __typename: "Greeting", - recipient: { __typename: "Person", name: "Cached Alice" }, - }, - }, - }); - } - - using _disabledAct = disableActEnvironment(); - const { takeSnapshot } = await renderHookToSnapshotStream( - () => - useQuery(query, { - fetchPolicy: "cache-first", - returnPartialData: true, - }), - { - wrapper: ({ children }) => ( - {children} - ), - } - ); - - await expect(takeSnapshot()).resolves.toStrictEqualTyped({ - data: { - greeting: { - __typename: "Greeting", - recipient: { __typename: "Person", name: "Cached Alice" }, - }, - }, - dataState: "partial", - loading: true, - networkStatus: NetworkStatus.loading, - previousData: undefined, - variables: {}, - }); - - link.simulateResult({ - result: { - data: { - greeting: { message: "Hello world", __typename: "Greeting" }, - }, - hasNext: true, - }, - }); - - await expect(takeSnapshot()).resolves.toStrictEqualTyped({ - data: markAsStreaming({ - greeting: { - __typename: "Greeting", - message: "Hello world", - recipient: { __typename: "Person", name: "Cached Alice" }, - }, - }), - dataState: "streaming", - loading: true, - networkStatus: NetworkStatus.streaming, - previousData: { - greeting: { - __typename: "Greeting", - recipient: { __typename: "Person", name: "Cached Alice" }, - }, - }, - variables: {}, - }); - - link.simulateResult( - { - result: { - incremental: [ - { - data: { - __typename: "Greeting", - recipient: { name: "Alice", __typename: "Person" }, - }, - path: ["greeting"], - }, - ], - hasNext: false, - }, - }, - true - ); - - await expect(takeSnapshot()).resolves.toStrictEqualTyped({ - data: { - greeting: { - __typename: "Greeting", - message: "Hello world", - recipient: { __typename: "Person", name: "Alice" }, - }, - }, - dataState: "complete", - loading: false, - networkStatus: NetworkStatus.ready, - previousData: { - greeting: { - __typename: "Greeting", - message: "Hello world", - recipient: { __typename: "Person", name: "Cached Alice" }, - }, - }, - variables: {}, - }); - - await expect(takeSnapshot).not.toRerender(); - }); - }); - describe("interaction with `prioritizeCacheValues`", () => { const cacheData = { something: "foo" }; const emptyData = undefined; diff --git a/src/react/hooks/__tests__/useQuery/defer20220824.test.tsx b/src/react/hooks/__tests__/useQuery/defer20220824.test.tsx new file mode 100644 index 00000000000..9e8229409bd --- /dev/null +++ b/src/react/hooks/__tests__/useQuery/defer20220824.test.tsx @@ -0,0 +1,1234 @@ +import { + disableActEnvironment, + renderHookToSnapshotStream, +} from "@testing-library/react-render-stream"; +import React from "react"; + +import { + ApolloClient, + CombinedGraphQLErrors, + gql, + InMemoryCache, + NetworkStatus, +} from "@apollo/client"; +import { Defer20220824Handler } from "@apollo/client/incremental"; +import { ApolloProvider, useQuery } from "@apollo/client/react"; +import { MockSubscriptionLink } from "@apollo/client/testing"; +import { markAsStreaming, spyOnConsole } from "@apollo/client/testing/internal"; + +test("should handle deferred queries", async () => { + const query = gql` + query { + greeting { + message + ... 
on Greeting @defer { + recipient { + name + } + } + } + } + `; + + const link = new MockSubscriptionLink(); + + const client = new ApolloClient({ + link, + cache: new InMemoryCache(), + incrementalHandler: new Defer20220824Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot } = await renderHookToSnapshotStream( + () => useQuery(query), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + previousData: undefined, + variables: {}, + }); + + setTimeout(() => { + link.simulateResult({ + result: { + data: { + greeting: { + message: "Hello world", + __typename: "Greeting", + }, + }, + hasNext: true, + }, + }); + }); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: markAsStreaming({ + greeting: { + message: "Hello world", + __typename: "Greeting", + }, + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + previousData: undefined, + variables: {}, + }); + + setTimeout(() => { + link.simulateResult( + { + result: { + incremental: [ + { + data: { + recipient: { + name: "Alice", + __typename: "Person", + }, + __typename: "Greeting", + }, + path: ["greeting"], + }, + ], + hasNext: false, + }, + }, + true + ); + }); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: { + greeting: { + message: "Hello world", + __typename: "Greeting", + recipient: { + name: "Alice", + __typename: "Person", + }, + }, + }, + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.ready, + previousData: { + greeting: { + message: "Hello world", + __typename: "Greeting", + }, + }, + variables: {}, + }); + + await expect(takeSnapshot).not.toRerender(); +}); + +test("should handle deferred queries in lists", async () => { + const query = gql` + { + greetings { + message + ... 
on Greeting @defer { + recipient { + name + } + } + } + } + `; + + const link = new MockSubscriptionLink(); + + const client = new ApolloClient({ + link, + cache: new InMemoryCache(), + incrementalHandler: new Defer20220824Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot } = await renderHookToSnapshotStream( + () => useQuery(query), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + previousData: undefined, + variables: {}, + }); + + setTimeout(() => { + link.simulateResult({ + result: { + data: { + greetings: [ + { message: "Hello world", __typename: "Greeting" }, + { message: "Hello again", __typename: "Greeting" }, + ], + }, + hasNext: true, + }, + }); + }); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: markAsStreaming({ + greetings: [ + { message: "Hello world", __typename: "Greeting" }, + { message: "Hello again", __typename: "Greeting" }, + ], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + previousData: undefined, + variables: {}, + }); + + setTimeout(() => { + link.simulateResult({ + result: { + incremental: [ + { + data: { + recipient: { + name: "Alice", + __typename: "Person", + }, + __typename: "Greeting", + }, + path: ["greetings", 0], + }, + ], + hasNext: true, + }, + }); + }); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: markAsStreaming({ + greetings: [ + { + message: "Hello world", + __typename: "Greeting", + recipient: { name: "Alice", __typename: "Person" }, + }, + { message: "Hello again", __typename: "Greeting" }, + ], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + previousData: { + greetings: [ + { message: "Hello world", __typename: "Greeting" }, + { message: "Hello again", __typename: "Greeting" }, + ], + }, + variables: {}, + }); + + setTimeout(() => { + link.simulateResult( + { + result: { + incremental: [ + { + data: { + recipient: { + name: "Bob", + __typename: "Person", + }, + __typename: "Greeting", + }, + path: ["greetings", 1], + }, + ], + hasNext: false, + }, + }, + true + ); + }); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: { + greetings: [ + { + message: "Hello world", + __typename: "Greeting", + recipient: { name: "Alice", __typename: "Person" }, + }, + { + message: "Hello again", + __typename: "Greeting", + recipient: { name: "Bob", __typename: "Person" }, + }, + ], + }, + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.ready, + previousData: { + greetings: [ + { + message: "Hello world", + __typename: "Greeting", + recipient: { name: "Alice", __typename: "Person" }, + }, + { message: "Hello again", __typename: "Greeting" }, + ], + }, + variables: {}, + }); + + await expect(takeSnapshot).not.toRerender(); +}); + +test("should handle deferred queries in lists, merging arrays", async () => { + const query = gql` + query DeferVariation { + allProducts { + delivery { + ...MyFragment @defer + } + sku + id + } + } + fragment MyFragment on DeliveryEstimates { + estimatedDelivery + fastestDelivery + } + `; + + const link = new MockSubscriptionLink(); + + const client = new ApolloClient({ + link, + cache: new InMemoryCache(), + incrementalHandler: new Defer20220824Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot } = await 
renderHookToSnapshotStream( + () => useQuery(query), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + previousData: undefined, + variables: {}, + }); + + setTimeout(() => { + link.simulateResult({ + result: { + data: { + allProducts: [ + { + __typename: "Product", + delivery: { + __typename: "DeliveryEstimates", + }, + id: "apollo-federation", + sku: "federation", + }, + { + __typename: "Product", + delivery: { + __typename: "DeliveryEstimates", + }, + id: "apollo-studio", + sku: "studio", + }, + ], + }, + hasNext: true, + }, + }); + }); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: markAsStreaming({ + allProducts: [ + { + __typename: "Product", + delivery: { + __typename: "DeliveryEstimates", + }, + id: "apollo-federation", + sku: "federation", + }, + { + __typename: "Product", + delivery: { + __typename: "DeliveryEstimates", + }, + id: "apollo-studio", + sku: "studio", + }, + ], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + previousData: undefined, + variables: {}, + }); + + setTimeout(() => { + link.simulateResult({ + result: { + hasNext: true, + incremental: [ + { + data: { + __typename: "DeliveryEstimates", + estimatedDelivery: "6/25/2021", + fastestDelivery: "6/24/2021", + }, + path: ["allProducts", 0, "delivery"], + }, + { + data: { + __typename: "DeliveryEstimates", + estimatedDelivery: "6/25/2021", + fastestDelivery: "6/24/2021", + }, + path: ["allProducts", 1, "delivery"], + }, + ], + }, + }); + }); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: markAsStreaming({ + allProducts: [ + { + __typename: "Product", + delivery: { + __typename: "DeliveryEstimates", + estimatedDelivery: "6/25/2021", + fastestDelivery: "6/24/2021", + }, + id: "apollo-federation", + sku: "federation", + }, + { + __typename: "Product", + delivery: { + __typename: "DeliveryEstimates", + estimatedDelivery: "6/25/2021", + fastestDelivery: "6/24/2021", + }, + id: "apollo-studio", + sku: "studio", + }, + ], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + previousData: { + allProducts: [ + { + __typename: "Product", + delivery: { + __typename: "DeliveryEstimates", + }, + id: "apollo-federation", + sku: "federation", + }, + { + __typename: "Product", + delivery: { + __typename: "DeliveryEstimates", + }, + id: "apollo-studio", + sku: "studio", + }, + ], + }, + variables: {}, + }); +}); + +test("should handle deferred queries with fetch policy no-cache", async () => { + const query = gql` + { + greeting { + message + ... 
on Greeting @defer { + recipient { + name + } + } + } + } + `; + + const link = new MockSubscriptionLink(); + + const client = new ApolloClient({ + link, + cache: new InMemoryCache(), + incrementalHandler: new Defer20220824Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot } = await renderHookToSnapshotStream( + () => useQuery(query, { fetchPolicy: "no-cache" }), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + previousData: undefined, + variables: {}, + }); + + setTimeout(() => { + link.simulateResult({ + result: { + data: { + greeting: { + message: "Hello world", + __typename: "Greeting", + }, + }, + hasNext: true, + }, + }); + }); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: markAsStreaming({ + greeting: { + message: "Hello world", + __typename: "Greeting", + }, + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + previousData: undefined, + variables: {}, + }); + + setTimeout(() => { + link.simulateResult( + { + result: { + incremental: [ + { + data: { + recipient: { + name: "Alice", + __typename: "Person", + }, + __typename: "Greeting", + }, + path: ["greeting"], + }, + ], + hasNext: false, + }, + }, + true + ); + }); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: { + greeting: { + message: "Hello world", + __typename: "Greeting", + recipient: { + name: "Alice", + __typename: "Person", + }, + }, + }, + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.ready, + previousData: { + greeting: { + message: "Hello world", + __typename: "Greeting", + }, + }, + variables: {}, + }); + + await expect(takeSnapshot).not.toRerender(); +}); + +test("should handle deferred queries with errors returned on the incremental batched result", async () => { + const query = gql` + query { + hero { + name + heroFriends { + id + name + ... 
@defer { + homeWorld + } + } + } + } + `; + + const link = new MockSubscriptionLink(); + + const client = new ApolloClient({ + link, + cache: new InMemoryCache(), + incrementalHandler: new Defer20220824Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot } = await renderHookToSnapshotStream( + () => useQuery(query), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + previousData: undefined, + variables: {}, + }); + + setTimeout(() => { + link.simulateResult({ + result: { + data: { + hero: { + name: "R2-D2", + heroFriends: [ + { + id: "1000", + name: "Luke Skywalker", + }, + { + id: "1003", + name: "Leia Organa", + }, + ], + }, + }, + hasNext: true, + }, + }); + }); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: markAsStreaming({ + hero: { + heroFriends: [ + { + id: "1000", + name: "Luke Skywalker", + }, + { + id: "1003", + name: "Leia Organa", + }, + ], + name: "R2-D2", + }, + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + previousData: undefined, + variables: {}, + }); + + setTimeout(() => { + link.simulateResult( + { + result: { + incremental: [ + { + path: ["hero", "heroFriends", 0], + errors: [ + { + message: + "homeWorld for character with ID 1000 could not be fetched.", + path: ["hero", "heroFriends", 0, "homeWorld"], + }, + ], + data: { + homeWorld: null, + }, + }, + { + path: ["hero", "heroFriends", 1], + data: { + homeWorld: "Alderaan", + }, + }, + ], + hasNext: false, + }, + }, + true + ); + }); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: { + hero: { + heroFriends: [ + { + id: "1000", + name: "Luke Skywalker", + }, + { + id: "1003", + name: "Leia Organa", + }, + ], + name: "R2-D2", + }, + }, + dataState: "complete", + error: new CombinedGraphQLErrors({ + data: { + hero: { + heroFriends: [ + { + id: "1000", + name: "Luke Skywalker", + homeWorld: null, + }, + { + id: "1003", + name: "Leia Organa", + homeWorld: "Alderaan", + }, + ], + name: "R2-D2", + }, + }, + errors: [ + { + message: "homeWorld for character with ID 1000 could not be fetched.", + path: ["hero", "heroFriends", 0, "homeWorld"], + }, + ], + }), + loading: false, + networkStatus: NetworkStatus.error, + previousData: undefined, + variables: {}, + }); + + await expect(takeSnapshot).not.toRerender(); +}); + +it('should handle deferred queries with errors returned on the incremental batched result and errorPolicy "all"', async () => { + const query = gql` + query { + hero { + name + heroFriends { + id + name + ... 
@defer { + homeWorld + } + } + } + } + `; + + const link = new MockSubscriptionLink(); + + const client = new ApolloClient({ + link, + cache: new InMemoryCache(), + incrementalHandler: new Defer20220824Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot } = await renderHookToSnapshotStream( + () => useQuery(query, { errorPolicy: "all" }), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + previousData: undefined, + variables: {}, + }); + + setTimeout(() => { + link.simulateResult({ + result: { + data: { + hero: { + name: "R2-D2", + heroFriends: [ + { + id: "1000", + name: "Luke Skywalker", + }, + { + id: "1003", + name: "Leia Organa", + }, + ], + }, + }, + hasNext: true, + }, + }); + }); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: markAsStreaming({ + hero: { + name: "R2-D2", + heroFriends: [ + { + id: "1000", + name: "Luke Skywalker", + }, + { + id: "1003", + name: "Leia Organa", + }, + ], + }, + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + previousData: undefined, + variables: {}, + }); + + setTimeout(() => { + link.simulateResult( + { + result: { + incremental: [ + { + path: ["hero", "heroFriends", 0], + errors: [ + { + message: + "homeWorld for character with ID 1000 could not be fetched.", + path: ["hero", "heroFriends", 0, "homeWorld"], + }, + ], + data: { + homeWorld: null, + }, + extensions: { + thing1: "foo", + thing2: "bar", + }, + }, + { + path: ["hero", "heroFriends", 1], + data: { + homeWorld: "Alderaan", + }, + extensions: { + thing1: "foo", + thing2: "bar", + }, + }, + ], + hasNext: false, + }, + }, + true + ); + }); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: { + hero: { + heroFriends: [ + { + // the only difference with the previous test + // is that homeWorld is populated since errorPolicy: all + // populates both partial data and error.graphQLErrors + homeWorld: null, + id: "1000", + name: "Luke Skywalker", + }, + { + // homeWorld is populated due to errorPolicy: all + homeWorld: "Alderaan", + id: "1003", + name: "Leia Organa", + }, + ], + name: "R2-D2", + }, + }, + dataState: "complete", + error: new CombinedGraphQLErrors({ + data: { + hero: { + heroFriends: [ + { homeWorld: null, id: "1000", name: "Luke Skywalker" }, + { homeWorld: "Alderaan", id: "1003", name: "Leia Organa" }, + ], + name: "R2-D2", + }, + }, + errors: [ + { + message: "homeWorld for character with ID 1000 could not be fetched.", + path: ["hero", "heroFriends", 0, "homeWorld"], + }, + ], + extensions: { + thing1: "foo", + thing2: "bar", + }, + }), + loading: false, + networkStatus: NetworkStatus.error, + previousData: { + hero: { + heroFriends: [ + { + id: "1000", + name: "Luke Skywalker", + }, + { + id: "1003", + name: "Leia Organa", + }, + ], + name: "R2-D2", + }, + }, + variables: {}, + }); + + await expect(takeSnapshot).not.toRerender(); +}); + +it('returns eventually consistent data from deferred queries with data in the cache while using a "cache-and-network" fetch policy', async () => { + const query = gql` + query { + greeting { + message + ... 
on Greeting @defer { + recipient { + name + } + } + } + } + `; + + const link = new MockSubscriptionLink(); + const cache = new InMemoryCache(); + const client = new ApolloClient({ + cache, + link, + incrementalHandler: new Defer20220824Handler(), + }); + + cache.writeQuery({ + query, + data: { + greeting: { + __typename: "Greeting", + message: "Hello cached", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + }); + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot } = await renderHookToSnapshotStream( + () => useQuery(query, { fetchPolicy: "cache-and-network" }), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: { + greeting: { + __typename: "Greeting", + message: "Hello cached", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + dataState: "complete", + loading: true, + networkStatus: NetworkStatus.loading, + previousData: undefined, + variables: {}, + }); + + link.simulateResult({ + result: { + data: { + greeting: { __typename: "Greeting", message: "Hello world" }, + }, + hasNext: true, + }, + }); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: markAsStreaming({ + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + previousData: { + greeting: { + __typename: "Greeting", + message: "Hello cached", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + variables: {}, + }); + + link.simulateResult( + { + result: { + incremental: [ + { + data: { + recipient: { name: "Alice", __typename: "Person" }, + __typename: "Greeting", + }, + path: ["greeting"], + }, + ], + hasNext: false, + }, + }, + true + ); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Alice" }, + }, + }, + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.ready, + previousData: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + variables: {}, + }); + + await expect(takeSnapshot).not.toRerender(); +}); + +it('returns eventually consistent data from deferred queries with partial data in the cache and using a "cache-first" fetch policy with `returnPartialData`', async () => { + const query = gql` + query { + greeting { + message + ... on Greeting @defer { + recipient { + name + } + } + } + } + `; + + const cache = new InMemoryCache(); + const link = new MockSubscriptionLink(); + const client = new ApolloClient({ + cache, + link, + incrementalHandler: new Defer20220824Handler(), + }); + + // We know we are writing partial data to the cache so suppress the console + // warning. 
+ { + using _consoleSpy = spyOnConsole("error"); + cache.writeQuery({ + query, + data: { + greeting: { + __typename: "Greeting", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + }); + } + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot } = await renderHookToSnapshotStream( + () => + useQuery(query, { + fetchPolicy: "cache-first", + returnPartialData: true, + }), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: { + greeting: { + __typename: "Greeting", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + dataState: "partial", + loading: true, + networkStatus: NetworkStatus.loading, + previousData: undefined, + variables: {}, + }); + + link.simulateResult({ + result: { + data: { + greeting: { message: "Hello world", __typename: "Greeting" }, + }, + hasNext: true, + }, + }); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: markAsStreaming({ + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + previousData: { + greeting: { + __typename: "Greeting", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + variables: {}, + }); + + link.simulateResult( + { + result: { + incremental: [ + { + data: { + __typename: "Greeting", + recipient: { name: "Alice", __typename: "Person" }, + }, + path: ["greeting"], + }, + ], + hasNext: false, + }, + }, + true + ); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Alice" }, + }, + }, + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.ready, + previousData: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + variables: {}, + }); + + await expect(takeSnapshot).not.toRerender(); +}); From 66986d4d38046c966c1abbdbde6e21f0481d55d5 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 16:57:11 -0600 Subject: [PATCH 048/254] Replace MockSubscriptionLink with mock helper --- .../__tests__/useQuery/defer20220824.test.tsx | 558 ++++++++---------- 1 file changed, 244 insertions(+), 314 deletions(-) diff --git a/src/react/hooks/__tests__/useQuery/defer20220824.test.tsx b/src/react/hooks/__tests__/useQuery/defer20220824.test.tsx index 9e8229409bd..79cb0309731 100644 --- a/src/react/hooks/__tests__/useQuery/defer20220824.test.tsx +++ b/src/react/hooks/__tests__/useQuery/defer20220824.test.tsx @@ -13,8 +13,11 @@ import { } from "@apollo/client"; import { Defer20220824Handler } from "@apollo/client/incremental"; import { ApolloProvider, useQuery } from "@apollo/client/react"; -import { MockSubscriptionLink } from "@apollo/client/testing"; -import { markAsStreaming, spyOnConsole } from "@apollo/client/testing/internal"; +import { + markAsStreaming, + mockDefer20220824, + spyOnConsole, +} from "@apollo/client/testing/internal"; test("should handle deferred queries", async () => { const query = gql` @@ -30,10 +33,11 @@ test("should handle deferred queries", async () => { } `; - const link = new MockSubscriptionLink(); + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDefer20220824(); const client = new ApolloClient({ - link, + link: httpLink, 
cache: new InMemoryCache(), incrementalHandler: new Defer20220824Handler(), }); @@ -57,18 +61,14 @@ test("should handle deferred queries", async () => { variables: {}, }); - setTimeout(() => { - link.simulateResult({ - result: { - data: { - greeting: { - message: "Hello world", - __typename: "Greeting", - }, - }, - hasNext: true, + enqueueInitialChunk({ + data: { + greeting: { + message: "Hello world", + __typename: "Greeting", }, - }); + }, + hasNext: true, }); await expect(takeSnapshot()).resolves.toStrictEqualTyped({ @@ -85,27 +85,20 @@ test("should handle deferred queries", async () => { variables: {}, }); - setTimeout(() => { - link.simulateResult( + enqueueSubsequentChunk({ + incremental: [ { - result: { - incremental: [ - { - data: { - recipient: { - name: "Alice", - __typename: "Person", - }, - __typename: "Greeting", - }, - path: ["greeting"], - }, - ], - hasNext: false, + data: { + recipient: { + name: "Alice", + __typename: "Person", + }, + __typename: "Greeting", }, + path: ["greeting"], }, - true - ); + ], + hasNext: false, }); await expect(takeSnapshot()).resolves.toStrictEqualTyped({ @@ -148,10 +141,11 @@ test("should handle deferred queries in lists", async () => { } `; - const link = new MockSubscriptionLink(); + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDefer20220824(); const client = new ApolloClient({ - link, + link: httpLink, cache: new InMemoryCache(), incrementalHandler: new Defer20220824Handler(), }); @@ -175,18 +169,14 @@ test("should handle deferred queries in lists", async () => { variables: {}, }); - setTimeout(() => { - link.simulateResult({ - result: { - data: { - greetings: [ - { message: "Hello world", __typename: "Greeting" }, - { message: "Hello again", __typename: "Greeting" }, - ], - }, - hasNext: true, - }, - }); + enqueueInitialChunk({ + data: { + greetings: [ + { message: "Hello world", __typename: "Greeting" }, + { message: "Hello again", __typename: "Greeting" }, + ], + }, + hasNext: true, }); await expect(takeSnapshot()).resolves.toStrictEqualTyped({ @@ -203,24 +193,20 @@ test("should handle deferred queries in lists", async () => { variables: {}, }); - setTimeout(() => { - link.simulateResult({ - result: { - incremental: [ - { - data: { - recipient: { - name: "Alice", - __typename: "Person", - }, - __typename: "Greeting", - }, - path: ["greetings", 0], + enqueueSubsequentChunk({ + incremental: [ + { + data: { + recipient: { + name: "Alice", + __typename: "Person", }, - ], - hasNext: true, + __typename: "Greeting", + }, + path: ["greetings", 0], }, - }); + ], + hasNext: true, }); await expect(takeSnapshot()).resolves.toStrictEqualTyped({ @@ -246,27 +232,20 @@ test("should handle deferred queries in lists", async () => { variables: {}, }); - setTimeout(() => { - link.simulateResult( + enqueueSubsequentChunk({ + incremental: [ { - result: { - incremental: [ - { - data: { - recipient: { - name: "Bob", - __typename: "Person", - }, - __typename: "Greeting", - }, - path: ["greetings", 1], - }, - ], - hasNext: false, + data: { + recipient: { + name: "Bob", + __typename: "Person", + }, + __typename: "Greeting", }, + path: ["greetings", 1], }, - true - ); + ], + hasNext: false, }); await expect(takeSnapshot()).resolves.toStrictEqualTyped({ @@ -320,10 +299,11 @@ test("should handle deferred queries in lists, merging arrays", async () => { } `; - const link = new MockSubscriptionLink(); + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDefer20220824(); const client = new ApolloClient({ - link, + link: 
httpLink, cache: new InMemoryCache(), incrementalHandler: new Defer20220824Handler(), }); @@ -347,32 +327,28 @@ test("should handle deferred queries in lists, merging arrays", async () => { variables: {}, }); - setTimeout(() => { - link.simulateResult({ - result: { - data: { - allProducts: [ - { - __typename: "Product", - delivery: { - __typename: "DeliveryEstimates", - }, - id: "apollo-federation", - sku: "federation", - }, - { - __typename: "Product", - delivery: { - __typename: "DeliveryEstimates", - }, - id: "apollo-studio", - sku: "studio", - }, - ], + enqueueInitialChunk({ + data: { + allProducts: [ + { + __typename: "Product", + delivery: { + __typename: "DeliveryEstimates", + }, + id: "apollo-federation", + sku: "federation", }, - hasNext: true, - }, - }); + { + __typename: "Product", + delivery: { + __typename: "DeliveryEstimates", + }, + id: "apollo-studio", + sku: "studio", + }, + ], + }, + hasNext: true, }); await expect(takeSnapshot()).resolves.toStrictEqualTyped({ @@ -403,30 +379,26 @@ test("should handle deferred queries in lists, merging arrays", async () => { variables: {}, }); - setTimeout(() => { - link.simulateResult({ - result: { - hasNext: true, - incremental: [ - { - data: { - __typename: "DeliveryEstimates", - estimatedDelivery: "6/25/2021", - fastestDelivery: "6/24/2021", - }, - path: ["allProducts", 0, "delivery"], - }, - { - data: { - __typename: "DeliveryEstimates", - estimatedDelivery: "6/25/2021", - fastestDelivery: "6/24/2021", - }, - path: ["allProducts", 1, "delivery"], - }, - ], + enqueueSubsequentChunk({ + hasNext: true, + incremental: [ + { + data: { + __typename: "DeliveryEstimates", + estimatedDelivery: "6/25/2021", + fastestDelivery: "6/24/2021", + }, + path: ["allProducts", 0, "delivery"], }, - }); + { + data: { + __typename: "DeliveryEstimates", + estimatedDelivery: "6/25/2021", + fastestDelivery: "6/24/2021", + }, + path: ["allProducts", 1, "delivery"], + }, + ], }); await expect(takeSnapshot()).resolves.toStrictEqualTyped({ @@ -495,10 +467,11 @@ test("should handle deferred queries with fetch policy no-cache", async () => { } `; - const link = new MockSubscriptionLink(); + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDefer20220824(); const client = new ApolloClient({ - link, + link: httpLink, cache: new InMemoryCache(), incrementalHandler: new Defer20220824Handler(), }); @@ -522,18 +495,14 @@ test("should handle deferred queries with fetch policy no-cache", async () => { variables: {}, }); - setTimeout(() => { - link.simulateResult({ - result: { - data: { - greeting: { - message: "Hello world", - __typename: "Greeting", - }, - }, - hasNext: true, + enqueueInitialChunk({ + data: { + greeting: { + message: "Hello world", + __typename: "Greeting", }, - }); + }, + hasNext: true, }); await expect(takeSnapshot()).resolves.toStrictEqualTyped({ @@ -550,27 +519,20 @@ test("should handle deferred queries with fetch policy no-cache", async () => { variables: {}, }); - setTimeout(() => { - link.simulateResult( + enqueueSubsequentChunk({ + incremental: [ { - result: { - incremental: [ - { - data: { - recipient: { - name: "Alice", - __typename: "Person", - }, - __typename: "Greeting", - }, - path: ["greeting"], - }, - ], - hasNext: false, + data: { + recipient: { + name: "Alice", + __typename: "Person", + }, + __typename: "Greeting", }, + path: ["greeting"], }, - true - ); + ], + hasNext: false, }); await expect(takeSnapshot()).resolves.toStrictEqualTyped({ @@ -615,10 +577,11 @@ test("should handle deferred queries with errors 
returned on the incremental bat } `; - const link = new MockSubscriptionLink(); + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDefer20220824(); const client = new ApolloClient({ - link, + link: httpLink, cache: new InMemoryCache(), incrementalHandler: new Defer20220824Handler(), }); @@ -642,27 +605,23 @@ test("should handle deferred queries with errors returned on the incremental bat variables: {}, }); - setTimeout(() => { - link.simulateResult({ - result: { - data: { - hero: { - name: "R2-D2", - heroFriends: [ - { - id: "1000", - name: "Luke Skywalker", - }, - { - id: "1003", - name: "Leia Organa", - }, - ], + enqueueInitialChunk({ + data: { + hero: { + name: "R2-D2", + heroFriends: [ + { + id: "1000", + name: "Luke Skywalker", }, - }, - hasNext: true, + { + id: "1003", + name: "Leia Organa", + }, + ], }, - }); + }, + hasNext: true, }); await expect(takeSnapshot()).resolves.toStrictEqualTyped({ @@ -688,36 +647,29 @@ test("should handle deferred queries with errors returned on the incremental bat variables: {}, }); - setTimeout(() => { - link.simulateResult( + enqueueSubsequentChunk({ + incremental: [ { - result: { - incremental: [ - { - path: ["hero", "heroFriends", 0], - errors: [ - { - message: - "homeWorld for character with ID 1000 could not be fetched.", - path: ["hero", "heroFriends", 0, "homeWorld"], - }, - ], - data: { - homeWorld: null, - }, - }, - { - path: ["hero", "heroFriends", 1], - data: { - homeWorld: "Alderaan", - }, - }, - ], - hasNext: false, + path: ["hero", "heroFriends", 0], + errors: [ + { + message: + "homeWorld for character with ID 1000 could not be fetched.", + path: ["hero", "heroFriends", 0, "homeWorld"], + }, + ], + data: { + homeWorld: null, + }, + }, + { + path: ["hero", "heroFriends", 1], + data: { + homeWorld: "Alderaan", }, }, - true - ); + ], + hasNext: false, }); await expect(takeSnapshot()).resolves.toStrictEqualTyped({ @@ -787,10 +739,11 @@ it('should handle deferred queries with errors returned on the incremental batch } `; - const link = new MockSubscriptionLink(); + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDefer20220824(); const client = new ApolloClient({ - link, + link: httpLink, cache: new InMemoryCache(), incrementalHandler: new Defer20220824Handler(), }); @@ -814,27 +767,23 @@ it('should handle deferred queries with errors returned on the incremental batch variables: {}, }); - setTimeout(() => { - link.simulateResult({ - result: { - data: { - hero: { - name: "R2-D2", - heroFriends: [ - { - id: "1000", - name: "Luke Skywalker", - }, - { - id: "1003", - name: "Leia Organa", - }, - ], + enqueueInitialChunk({ + data: { + hero: { + name: "R2-D2", + heroFriends: [ + { + id: "1000", + name: "Luke Skywalker", }, - }, - hasNext: true, + { + id: "1003", + name: "Leia Organa", + }, + ], }, - }); + }, + hasNext: true, }); await expect(takeSnapshot()).resolves.toStrictEqualTyped({ @@ -860,44 +809,37 @@ it('should handle deferred queries with errors returned on the incremental batch variables: {}, }); - setTimeout(() => { - link.simulateResult( + enqueueSubsequentChunk({ + incremental: [ { - result: { - incremental: [ - { - path: ["hero", "heroFriends", 0], - errors: [ - { - message: - "homeWorld for character with ID 1000 could not be fetched.", - path: ["hero", "heroFriends", 0, "homeWorld"], - }, - ], - data: { - homeWorld: null, - }, - extensions: { - thing1: "foo", - thing2: "bar", - }, - }, - { - path: ["hero", "heroFriends", 1], - data: { - homeWorld: "Alderaan", - }, - extensions: { - 
thing1: "foo", - thing2: "bar", - }, - }, - ], - hasNext: false, + path: ["hero", "heroFriends", 0], + errors: [ + { + message: + "homeWorld for character with ID 1000 could not be fetched.", + path: ["hero", "heroFriends", 0, "homeWorld"], + }, + ], + data: { + homeWorld: null, + }, + extensions: { + thing1: "foo", + thing2: "bar", + }, + }, + { + path: ["hero", "heroFriends", 1], + data: { + homeWorld: "Alderaan", + }, + extensions: { + thing1: "foo", + thing2: "bar", }, }, - true - ); + ], + hasNext: false, }); await expect(takeSnapshot()).resolves.toStrictEqualTyped({ @@ -981,11 +923,12 @@ it('returns eventually consistent data from deferred queries with data in the ca } `; - const link = new MockSubscriptionLink(); + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDefer20220824(); const cache = new InMemoryCache(); const client = new ApolloClient({ cache, - link, + link: httpLink, incrementalHandler: new Defer20220824Handler(), }); @@ -1025,13 +968,11 @@ it('returns eventually consistent data from deferred queries with data in the ca variables: {}, }); - link.simulateResult({ - result: { - data: { - greeting: { __typename: "Greeting", message: "Hello world" }, - }, - hasNext: true, + enqueueInitialChunk({ + data: { + greeting: { __typename: "Greeting", message: "Hello world" }, }, + hasNext: true, }); await expect(takeSnapshot()).resolves.toStrictEqualTyped({ @@ -1055,23 +996,18 @@ it('returns eventually consistent data from deferred queries with data in the ca variables: {}, }); - link.simulateResult( - { - result: { - incremental: [ - { - data: { - recipient: { name: "Alice", __typename: "Person" }, - __typename: "Greeting", - }, - path: ["greeting"], - }, - ], - hasNext: false, + enqueueSubsequentChunk({ + incremental: [ + { + data: { + recipient: { name: "Alice", __typename: "Person" }, + __typename: "Greeting", + }, + path: ["greeting"], }, - }, - true - ); + ], + hasNext: false, + }); await expect(takeSnapshot()).resolves.toStrictEqualTyped({ data: { @@ -1112,10 +1048,11 @@ it('returns eventually consistent data from deferred queries with partial data i `; const cache = new InMemoryCache(); - const link = new MockSubscriptionLink(); + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDefer20220824(); const client = new ApolloClient({ cache, - link, + link: httpLink, incrementalHandler: new Defer20220824Handler(), }); @@ -1162,13 +1099,11 @@ it('returns eventually consistent data from deferred queries with partial data i variables: {}, }); - link.simulateResult({ - result: { - data: { - greeting: { message: "Hello world", __typename: "Greeting" }, - }, - hasNext: true, + enqueueInitialChunk({ + data: { + greeting: { message: "Hello world", __typename: "Greeting" }, }, + hasNext: true, }); await expect(takeSnapshot()).resolves.toStrictEqualTyped({ @@ -1191,23 +1126,18 @@ it('returns eventually consistent data from deferred queries with partial data i variables: {}, }); - link.simulateResult( - { - result: { - incremental: [ - { - data: { - __typename: "Greeting", - recipient: { name: "Alice", __typename: "Person" }, - }, - path: ["greeting"], - }, - ], - hasNext: false, + enqueueSubsequentChunk({ + incremental: [ + { + data: { + __typename: "Greeting", + recipient: { name: "Alice", __typename: "Person" }, + }, + path: ["greeting"], }, - }, - true - ); + ], + hasNext: false, + }); await expect(takeSnapshot()).resolves.toStrictEqualTyped({ data: { From 0612a52e5e7b2c71b893dbbe5e9ab4b72315c677 Mon Sep 17 00:00:00 2001 From: Jerel Miller 
Date: Thu, 4 Sep 2025 18:07:10 -0600 Subject: [PATCH 049/254] Add tests for useQuery with the new defer spec --- .../useQuery/deferGraphQL17Alpha2.test.tsx | 1154 +++++++++++++++++ 1 file changed, 1154 insertions(+) create mode 100644 src/react/hooks/__tests__/useQuery/deferGraphQL17Alpha2.test.tsx diff --git a/src/react/hooks/__tests__/useQuery/deferGraphQL17Alpha2.test.tsx b/src/react/hooks/__tests__/useQuery/deferGraphQL17Alpha2.test.tsx new file mode 100644 index 00000000000..218f774691a --- /dev/null +++ b/src/react/hooks/__tests__/useQuery/deferGraphQL17Alpha2.test.tsx @@ -0,0 +1,1154 @@ +import { + disableActEnvironment, + renderHookToSnapshotStream, +} from "@testing-library/react-render-stream"; +import React from "react"; + +import { + ApolloClient, + CombinedGraphQLErrors, + gql, + InMemoryCache, + NetworkStatus, +} from "@apollo/client"; +import { GraphQL17Alpha9Handler } from "@apollo/client/incremental"; +import { ApolloProvider, useQuery } from "@apollo/client/react"; +import { + markAsStreaming, + mockDeferStreamGraphQL17Alpha9, + spyOnConsole, +} from "@apollo/client/testing/internal"; + +test("should handle deferred queries", async () => { + const query = gql` + query { + greeting { + message + ... on Greeting @defer { + recipient { + name + } + } + } + } + `; + + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDeferStreamGraphQL17Alpha9(); + + const client = new ApolloClient({ + link: httpLink, + cache: new InMemoryCache(), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot } = await renderHookToSnapshotStream( + () => useQuery(query), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + previousData: undefined, + variables: {}, + }); + + enqueueInitialChunk({ + data: { + greeting: { + message: "Hello world", + __typename: "Greeting", + }, + }, + pending: [{ id: "0", path: ["greeting"] }], + hasNext: true, + }); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: markAsStreaming({ + greeting: { + message: "Hello world", + __typename: "Greeting", + }, + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + previousData: undefined, + variables: {}, + }); + + enqueueSubsequentChunk({ + incremental: [ + { + data: { + recipient: { + name: "Alice", + __typename: "Person", + }, + __typename: "Greeting", + }, + id: "0", + }, + ], + completed: [{ id: "0" }], + hasNext: false, + }); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: { + greeting: { + message: "Hello world", + __typename: "Greeting", + recipient: { + name: "Alice", + __typename: "Person", + }, + }, + }, + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.ready, + previousData: { + greeting: { + message: "Hello world", + __typename: "Greeting", + }, + }, + variables: {}, + }); + + await expect(takeSnapshot).not.toRerender(); +}); + +test("should handle deferred queries in lists", async () => { + const query = gql` + { + greetings { + message + ... 
on Greeting @defer { + recipient { + name + } + } + } + } + `; + + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDeferStreamGraphQL17Alpha9(); + + const client = new ApolloClient({ + link: httpLink, + cache: new InMemoryCache(), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot } = await renderHookToSnapshotStream( + () => useQuery(query), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + previousData: undefined, + variables: {}, + }); + + enqueueInitialChunk({ + data: { + greetings: [ + { message: "Hello world", __typename: "Greeting" }, + { message: "Hello again", __typename: "Greeting" }, + ], + }, + pending: [ + { id: "0", path: ["greetings", 0] }, + { id: "1", path: ["greetings", 1] }, + ], + hasNext: true, + }); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: markAsStreaming({ + greetings: [ + { message: "Hello world", __typename: "Greeting" }, + { message: "Hello again", __typename: "Greeting" }, + ], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + previousData: undefined, + variables: {}, + }); + + enqueueSubsequentChunk({ + incremental: [ + { + data: { + recipient: { + name: "Alice", + __typename: "Person", + }, + __typename: "Greeting", + }, + id: "0", + }, + { + data: { + recipient: { + name: "Bob", + __typename: "Person", + }, + __typename: "Greeting", + }, + id: "1", + }, + ], + completed: [{ id: "0" }, { id: "1" }], + hasNext: false, + }); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: { + greetings: [ + { + message: "Hello world", + __typename: "Greeting", + recipient: { name: "Alice", __typename: "Person" }, + }, + { + message: "Hello again", + __typename: "Greeting", + recipient: { name: "Bob", __typename: "Person" }, + }, + ], + }, + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.ready, + previousData: { + greetings: [ + { message: "Hello world", __typename: "Greeting" }, + { message: "Hello again", __typename: "Greeting" }, + ], + }, + variables: {}, + }); + + await expect(takeSnapshot).not.toRerender(); +}); + +test("should handle deferred queries in lists, merging arrays", async () => { + const query = gql` + query DeferVariation { + allProducts { + delivery { + ...MyFragment @defer + } + sku + id + } + } + fragment MyFragment on DeliveryEstimates { + estimatedDelivery + fastestDelivery + } + `; + + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDeferStreamGraphQL17Alpha9(); + + const client = new ApolloClient({ + link: httpLink, + cache: new InMemoryCache(), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot } = await renderHookToSnapshotStream( + () => useQuery(query), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + previousData: undefined, + variables: {}, + }); + + enqueueInitialChunk({ + data: { + allProducts: [ + { + __typename: "Product", + delivery: { + __typename: "DeliveryEstimates", + }, + id: "apollo-federation", + sku: "federation", + }, + { + __typename: "Product", + delivery: { + 
__typename: "DeliveryEstimates", + }, + id: "apollo-studio", + sku: "studio", + }, + ], + }, + pending: [ + { id: "0", path: ["allProducts", 0, "delivery"] }, + { id: "1", path: ["allProducts", 1, "delivery"] }, + ], + hasNext: true, + }); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: markAsStreaming({ + allProducts: [ + { + __typename: "Product", + delivery: { + __typename: "DeliveryEstimates", + }, + id: "apollo-federation", + sku: "federation", + }, + { + __typename: "Product", + delivery: { + __typename: "DeliveryEstimates", + }, + id: "apollo-studio", + sku: "studio", + }, + ], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + previousData: undefined, + variables: {}, + }); + + enqueueSubsequentChunk({ + hasNext: false, + incremental: [ + { + data: { + __typename: "DeliveryEstimates", + estimatedDelivery: "6/25/2021", + fastestDelivery: "6/24/2021", + }, + id: "0", + }, + { + data: { + __typename: "DeliveryEstimates", + estimatedDelivery: "6/25/2021", + fastestDelivery: "6/24/2021", + }, + id: "1", + }, + ], + completed: [{ id: "0" }, { id: "1" }], + }); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: markAsStreaming({ + allProducts: [ + { + __typename: "Product", + delivery: { + __typename: "DeliveryEstimates", + estimatedDelivery: "6/25/2021", + fastestDelivery: "6/24/2021", + }, + id: "apollo-federation", + sku: "federation", + }, + { + __typename: "Product", + delivery: { + __typename: "DeliveryEstimates", + estimatedDelivery: "6/25/2021", + fastestDelivery: "6/24/2021", + }, + id: "apollo-studio", + sku: "studio", + }, + ], + }), + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.ready, + previousData: { + allProducts: [ + { + __typename: "Product", + delivery: { + __typename: "DeliveryEstimates", + }, + id: "apollo-federation", + sku: "federation", + }, + { + __typename: "Product", + delivery: { + __typename: "DeliveryEstimates", + }, + id: "apollo-studio", + sku: "studio", + }, + ], + }, + variables: {}, + }); +}); + +test("should handle deferred queries with fetch policy no-cache", async () => { + const query = gql` + { + greeting { + message + ... 
on Greeting @defer { + recipient { + name + } + } + } + } + `; + + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDeferStreamGraphQL17Alpha9(); + + const client = new ApolloClient({ + link: httpLink, + cache: new InMemoryCache(), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot } = await renderHookToSnapshotStream( + () => useQuery(query, { fetchPolicy: "no-cache" }), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + previousData: undefined, + variables: {}, + }); + + enqueueInitialChunk({ + data: { + greeting: { + message: "Hello world", + __typename: "Greeting", + }, + }, + pending: [{ id: "0", path: ["greeting"] }], + hasNext: true, + }); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: markAsStreaming({ + greeting: { + message: "Hello world", + __typename: "Greeting", + }, + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + previousData: undefined, + variables: {}, + }); + + enqueueSubsequentChunk({ + incremental: [ + { + data: { + recipient: { + name: "Alice", + __typename: "Person", + }, + __typename: "Greeting", + }, + id: "0", + }, + ], + completed: [{ id: "0" }], + hasNext: false, + }); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: { + greeting: { + message: "Hello world", + __typename: "Greeting", + recipient: { + name: "Alice", + __typename: "Person", + }, + }, + }, + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.ready, + previousData: undefined, + variables: {}, + }); + + await expect(takeSnapshot).not.toRerender(); +}); + +test("should handle deferred queries with errors returned on the incremental batched result", async () => { + const query = gql` + query { + hero { + name + heroFriends { + id + name + ... 
@defer { + homeWorld + } + } + } + } + `; + + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDeferStreamGraphQL17Alpha9(); + + const client = new ApolloClient({ + link: httpLink, + cache: new InMemoryCache(), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot } = await renderHookToSnapshotStream( + () => useQuery(query), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + previousData: undefined, + variables: {}, + }); + + enqueueInitialChunk({ + data: { + hero: { + name: "R2-D2", + heroFriends: [ + { + id: "1000", + name: "Luke Skywalker", + }, + { + id: "1003", + name: "Leia Organa", + }, + ], + }, + }, + pending: [ + { id: "0", path: ["hero", "heroFriends", 0] }, + { id: "1", path: ["hero", "heroFriends", 1] }, + ], + hasNext: true, + }); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: markAsStreaming({ + hero: { + heroFriends: [ + { + id: "1000", + name: "Luke Skywalker", + }, + { + id: "1003", + name: "Leia Organa", + }, + ], + name: "R2-D2", + }, + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + previousData: undefined, + variables: {}, + }); + + enqueueSubsequentChunk({ + incremental: [ + { + errors: [ + { + message: + "homeWorld for character with ID 1000 could not be fetched.", + path: ["hero", "heroFriends", 0, "homeWorld"], + }, + ], + data: { + homeWorld: null, + }, + id: "0", + }, + { + data: { + homeWorld: "Alderaan", + }, + id: "1", + }, + ], + completed: [{ id: "0" }, { id: "1" }], + hasNext: false, + }); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: { + hero: { + heroFriends: [ + { + id: "1000", + name: "Luke Skywalker", + }, + { + id: "1003", + name: "Leia Organa", + }, + ], + name: "R2-D2", + }, + }, + dataState: "complete", + error: new CombinedGraphQLErrors({ + data: { + hero: { + heroFriends: [ + { + id: "1000", + name: "Luke Skywalker", + homeWorld: null, + }, + { + id: "1003", + name: "Leia Organa", + homeWorld: "Alderaan", + }, + ], + name: "R2-D2", + }, + }, + errors: [ + { + message: "homeWorld for character with ID 1000 could not be fetched.", + path: ["hero", "heroFriends", 0, "homeWorld"], + }, + ], + }), + loading: false, + networkStatus: NetworkStatus.error, + previousData: undefined, + variables: {}, + }); + + await expect(takeSnapshot).not.toRerender(); +}); + +test('should handle deferred queries with errors returned on the incremental batched result and errorPolicy "all"', async () => { + const query = gql` + query { + hero { + name + heroFriends { + id + name + ... 
@defer { + homeWorld + } + } + } + } + `; + + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDeferStreamGraphQL17Alpha9(); + + const client = new ApolloClient({ + link: httpLink, + cache: new InMemoryCache(), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot } = await renderHookToSnapshotStream( + () => useQuery(query, { errorPolicy: "all" }), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + previousData: undefined, + variables: {}, + }); + + enqueueInitialChunk({ + data: { + hero: { + name: "R2-D2", + heroFriends: [ + { + id: "1000", + name: "Luke Skywalker", + }, + { + id: "1003", + name: "Leia Organa", + }, + ], + }, + }, + pending: [ + { id: "0", path: ["hero", "heroFriends", 0] }, + { id: "1", path: ["hero", "heroFriends", 1] }, + ], + hasNext: true, + }); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: markAsStreaming({ + hero: { + name: "R2-D2", + heroFriends: [ + { + id: "1000", + name: "Luke Skywalker", + }, + { + id: "1003", + name: "Leia Organa", + }, + ], + }, + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + previousData: undefined, + variables: {}, + }); + + enqueueSubsequentChunk({ + incremental: [ + { + errors: [ + { + message: + "homeWorld for character with ID 1000 could not be fetched.", + path: ["hero", "heroFriends", 0, "homeWorld"], + }, + ], + data: { + homeWorld: null, + }, + id: "0", + extensions: { + thing1: "foo", + thing2: "bar", + }, + }, + { + data: { + homeWorld: "Alderaan", + }, + id: "1", + extensions: { + thing1: "foo", + thing2: "bar", + }, + }, + ], + completed: [{ id: "0" }, { id: "1" }], + hasNext: false, + }); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: { + hero: { + heroFriends: [ + { + // the only difference with the previous test + // is that homeWorld is populated since errorPolicy: all + // populates both partial data and error.graphQLErrors + homeWorld: null, + id: "1000", + name: "Luke Skywalker", + }, + { + // homeWorld is populated due to errorPolicy: all + homeWorld: "Alderaan", + id: "1003", + name: "Leia Organa", + }, + ], + name: "R2-D2", + }, + }, + dataState: "complete", + error: new CombinedGraphQLErrors({ + data: { + hero: { + heroFriends: [ + { homeWorld: null, id: "1000", name: "Luke Skywalker" }, + { homeWorld: "Alderaan", id: "1003", name: "Leia Organa" }, + ], + name: "R2-D2", + }, + }, + errors: [ + { + message: "homeWorld for character with ID 1000 could not be fetched.", + path: ["hero", "heroFriends", 0, "homeWorld"], + }, + ], + extensions: { + thing1: "foo", + thing2: "bar", + }, + }), + loading: false, + networkStatus: NetworkStatus.error, + previousData: { + hero: { + heroFriends: [ + { + id: "1000", + name: "Luke Skywalker", + }, + { + id: "1003", + name: "Leia Organa", + }, + ], + name: "R2-D2", + }, + }, + variables: {}, + }); + + await expect(takeSnapshot).not.toRerender(); +}); + +test('returns eventually consistent data from deferred queries with data in the cache while using a "cache-and-network" fetch policy', async () => { + const query = gql` + query { + greeting { + message + ... 
on Greeting @defer { + recipient { + name + } + } + } + } + `; + + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDeferStreamGraphQL17Alpha9(); + const cache = new InMemoryCache(); + const client = new ApolloClient({ + cache, + link: httpLink, + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + cache.writeQuery({ + query, + data: { + greeting: { + __typename: "Greeting", + message: "Hello cached", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + }); + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot } = await renderHookToSnapshotStream( + () => useQuery(query, { fetchPolicy: "cache-and-network" }), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: { + greeting: { + __typename: "Greeting", + message: "Hello cached", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + dataState: "complete", + loading: true, + networkStatus: NetworkStatus.loading, + previousData: undefined, + variables: {}, + }); + + enqueueInitialChunk({ + data: { + greeting: { __typename: "Greeting", message: "Hello world" }, + }, + pending: [{ id: "0", path: ["greeting"] }], + hasNext: true, + }); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: markAsStreaming({ + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + previousData: { + greeting: { + __typename: "Greeting", + message: "Hello cached", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + variables: {}, + }); + + enqueueSubsequentChunk({ + incremental: [ + { + data: { + recipient: { name: "Alice", __typename: "Person" }, + __typename: "Greeting", + }, + id: "0", + }, + ], + completed: [{ id: "0" }], + hasNext: false, + }); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Alice" }, + }, + }, + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.ready, + previousData: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + variables: {}, + }); + + await expect(takeSnapshot).not.toRerender(); +}); + +test('returns eventually consistent data from deferred queries with partial data in the cache and using a "cache-first" fetch policy with `returnPartialData`', async () => { + const query = gql` + query { + greeting { + message + ... on Greeting @defer { + recipient { + name + } + } + } + } + `; + + const cache = new InMemoryCache(); + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDeferStreamGraphQL17Alpha9(); + const client = new ApolloClient({ + cache, + link: httpLink, + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + // We know we are writing partial data to the cache so suppress the console + // warning. 
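  // (A write that omits the selected `greeting.message` field is expected to
  // trigger the cache's missing-field error in development builds, which is
  // the console.error noise the spy below silences.)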
+ { + using _consoleSpy = spyOnConsole("error"); + cache.writeQuery({ + query, + data: { + greeting: { + __typename: "Greeting", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + }); + } + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot } = await renderHookToSnapshotStream( + () => + useQuery(query, { + fetchPolicy: "cache-first", + returnPartialData: true, + }), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: { + greeting: { + __typename: "Greeting", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + dataState: "partial", + loading: true, + networkStatus: NetworkStatus.loading, + previousData: undefined, + variables: {}, + }); + + enqueueInitialChunk({ + data: { + greeting: { message: "Hello world", __typename: "Greeting" }, + }, + pending: [{ id: "0", path: ["greeting"] }], + hasNext: true, + }); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: markAsStreaming({ + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + previousData: { + greeting: { + __typename: "Greeting", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + variables: {}, + }); + + enqueueSubsequentChunk({ + incremental: [ + { + data: { + __typename: "Greeting", + recipient: { name: "Alice", __typename: "Person" }, + }, + id: "0", + }, + ], + completed: [{ id: "0" }], + hasNext: false, + }); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Alice" }, + }, + }, + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.ready, + previousData: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + variables: {}, + }); + + await expect(takeSnapshot).not.toRerender(); +}); From af769e703d54e58228e3bc0698a8df5a6d3bb009 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 18:40:55 -0600 Subject: [PATCH 050/254] Port first defer test for useSuspenseQuery to own file --- .../hooks/__tests__/useSuspenseQuery.test.tsx | 100 ------------- .../useSuspenseQuery/defer20220824.test.tsx | 137 ++++++++++++++++++ 2 files changed, 137 insertions(+), 100 deletions(-) create mode 100644 src/react/hooks/__tests__/useSuspenseQuery/defer20220824.test.tsx diff --git a/src/react/hooks/__tests__/useSuspenseQuery.test.tsx b/src/react/hooks/__tests__/useSuspenseQuery.test.tsx index 5ba0dce3d1b..1f40289dc4d 100644 --- a/src/react/hooks/__tests__/useSuspenseQuery.test.tsx +++ b/src/react/hooks/__tests__/useSuspenseQuery.test.tsx @@ -7137,106 +7137,6 @@ describe("useSuspenseQuery", () => { expect(client.getObservableQueries().size).toBe(1); }); - it("suspends deferred queries until initial chunk loads then streams in data as it loads", async () => { - const query = gql` - query { - greeting { - message - ... 
on Greeting @defer { - recipient { - name - } - } - } - } - `; - - const link = new MockSubscriptionLink(); - - const { result, renders } = await renderSuspenseHook( - () => useSuspenseQuery(query), - { link, incrementalHandler: new Defer20220824Handler() } - ); - - expect(renders.suspenseCount).toBe(1); - - link.simulateResult({ - result: { - data: { greeting: { message: "Hello world", __typename: "Greeting" } }, - hasNext: true, - }, - }); - - await waitFor(() => { - expect(result.current).toStrictEqualTyped({ - data: markAsStreaming({ - greeting: { message: "Hello world", __typename: "Greeting" }, - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: undefined, - }); - }); - - link.simulateResult( - { - result: { - incremental: [ - { - data: { - recipient: { name: "Alice", __typename: "Person" }, - __typename: "Greeting", - }, - path: ["greeting"], - }, - ], - hasNext: false, - }, - }, - true - ); - - await waitFor(() => { - expect(result.current).toStrictEqualTyped({ - data: { - greeting: { - __typename: "Greeting", - message: "Hello world", - recipient: { __typename: "Person", name: "Alice" }, - }, - }, - dataState: "complete", - networkStatus: NetworkStatus.ready, - error: undefined, - }); - }); - - expect(renders.count).toBe(3 + (IS_REACT_19 ? renders.suspenseCount : 0)); - expect(renders.suspenseCount).toBe(1); - expect(renders.frames).toStrictEqualTyped([ - { - data: markAsStreaming({ - greeting: { message: "Hello world", __typename: "Greeting" }, - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: undefined, - }, - { - data: { - greeting: { - __typename: "Greeting", - message: "Hello world", - recipient: { __typename: "Person", name: "Alice" }, - }, - }, - dataState: "complete", - networkStatus: NetworkStatus.ready, - error: undefined, - }, - ]); - }); - it.each([ "cache-first", "network-only", diff --git a/src/react/hooks/__tests__/useSuspenseQuery/defer20220824.test.tsx b/src/react/hooks/__tests__/useSuspenseQuery/defer20220824.test.tsx new file mode 100644 index 00000000000..6c3ae5f41ef --- /dev/null +++ b/src/react/hooks/__tests__/useSuspenseQuery/defer20220824.test.tsx @@ -0,0 +1,137 @@ +import { + createRenderStream, + disableActEnvironment, + useTrackRenders, +} from "@testing-library/react-render-stream"; +import React, { Suspense } from "react"; +import { ErrorBoundary } from "react-error-boundary"; + +import { + ApolloClient, + gql, + InMemoryCache, + NetworkStatus, +} from "@apollo/client"; +import { Defer20220824Handler } from "@apollo/client/incremental"; +import { ApolloProvider, useSuspenseQuery } from "@apollo/client/react"; +import { + markAsStreaming, + mockDefer20220824, +} from "@apollo/client/testing/internal"; + +test("suspends deferred queries until initial chunk loads then streams in data as it loads", async () => { + const query = gql` + query { + greeting { + message + ... 
on Greeting @defer { + recipient { + name + } + } + } + } + `; + + function Component() { + useTrackRenders(); + + const result = useSuspenseQuery(query); + replaceSnapshot(result); + + return null; + } + + function SuspenseFallback() { + useTrackRenders(); + + return null; + } + + function App() { + return ( + }> + + + ); + } + + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDefer20220824(); + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: httpLink, + incrementalHandler: new Defer20220824Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender, replaceSnapshot, render } = + createRenderStream< + useSuspenseQuery.Result + >(); + + await render(, { + wrapper: ({ children }) => ( + {children} + ), + }); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual([SuspenseFallback]); + } + + enqueueInitialChunk({ + data: { greeting: { message: "Hello world", __typename: "Greeting" } }, + hasNext: true, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual([Component]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + greeting: { message: "Hello world", __typename: "Greeting" }, + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + enqueueSubsequentChunk({ + incremental: [ + { + data: { + recipient: { name: "Alice", __typename: "Person" }, + __typename: "Greeting", + }, + path: ["greeting"], + }, + ], + hasNext: false, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual([Component]); + expect(snapshot).toStrictEqualTyped({ + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Alice" }, + }, + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + await expect(takeRender).not.toRerender(); +}); From 45bf6ab60209867adc0ff98de5cf3174830b85bf Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 19:24:33 -0600 Subject: [PATCH 051/254] Extract render helper --- .../useSuspenseQuery/defer20220824.test.tsx | 83 +++++++++++-------- 1 file changed, 49 insertions(+), 34 deletions(-) diff --git a/src/react/hooks/__tests__/useSuspenseQuery/defer20220824.test.tsx b/src/react/hooks/__tests__/useSuspenseQuery/defer20220824.test.tsx index 6c3ae5f41ef..08a8824998d 100644 --- a/src/react/hooks/__tests__/useSuspenseQuery/defer20220824.test.tsx +++ b/src/react/hooks/__tests__/useSuspenseQuery/defer20220824.test.tsx @@ -1,11 +1,12 @@ +import type { RenderOptions } from "@testing-library/react"; import { createRenderStream, disableActEnvironment, useTrackRenders, } from "@testing-library/react-render-stream"; import React, { Suspense } from "react"; -import { ErrorBoundary } from "react-error-boundary"; +import type { OperationVariables } from "@apollo/client"; import { ApolloClient, gql, @@ -19,31 +20,21 @@ import { mockDefer20220824, } from "@apollo/client/testing/internal"; -test("suspends deferred queries until initial chunk loads then streams in data as it loads", async () => { - const query = gql` - query { - greeting { - message - ... 
on Greeting @defer { - recipient { - name - } - } - } - } - `; - - function Component() { - useTrackRenders(); +const IS_REACT_19 = React.version.startsWith("19"); - const result = useSuspenseQuery(query); - replaceSnapshot(result); +async function renderSuspenseHook( + renderHook: () => useSuspenseQuery.Result, + options: Pick +) { + function UseSuspenseQuery() { + useTrackRenders({ name: "useSuspenseQuery" }); + renderStream.replaceSnapshot(renderHook()); return null; } function SuspenseFallback() { - useTrackRenders(); + useTrackRenders({ name: "SuspenseFallback" }); return null; } @@ -51,11 +42,37 @@ test("suspends deferred queries until initial chunk loads then streams in data a function App() { return ( }> - + ); } + const { render, takeRender, ...renderStream } = + createRenderStream>(); + + const utils = await render(, options); + + function rerender() { + return utils.rerender(); + } + + return { takeRender, rerender }; +} + +test("suspends deferred queries until initial chunk loads then streams in data as it loads", async () => { + const query = gql` + query { + greeting { + message + ... on Greeting @defer { + recipient { + name + } + } + } + } + `; + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = mockDefer20220824(); @@ -66,21 +83,19 @@ test("suspends deferred queries until initial chunk loads then streams in data a }); using _disabledAct = disableActEnvironment(); - const { takeRender, replaceSnapshot, render } = - createRenderStream< - useSuspenseQuery.Result - >(); - - await render(, { - wrapper: ({ children }) => ( - {children} - ), - }); + const { takeRender } = await renderSuspenseHook( + () => useSuspenseQuery(query), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); { const { renderedComponents } = await takeRender(); - expect(renderedComponents).toStrictEqual([SuspenseFallback]); + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); } enqueueInitialChunk({ @@ -91,7 +106,7 @@ test("suspends deferred queries until initial chunk loads then streams in data a { const { snapshot, renderedComponents } = await takeRender(); - expect(renderedComponents).toStrictEqual([Component]); + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); expect(snapshot).toStrictEqualTyped({ data: markAsStreaming({ greeting: { message: "Hello world", __typename: "Greeting" }, @@ -118,7 +133,7 @@ test("suspends deferred queries until initial chunk loads then streams in data a { const { snapshot, renderedComponents } = await takeRender(); - expect(renderedComponents).toStrictEqual([Component]); + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); expect(snapshot).toStrictEqualTyped({ data: { greeting: { From ae21e84bfd562883bfda8fa1968cdfe51d8103cc Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 19:34:34 -0600 Subject: [PATCH 052/254] Migrate useSuspenseQuery defer tests to own file with renderStream --- .../hooks/__tests__/useSuspenseQuery.test.tsx | 2763 +---------------- .../useSuspenseQuery/defer20220824.test.tsx | 2408 +++++++++++++- 2 files changed, 2395 insertions(+), 2776 deletions(-) diff --git a/src/react/hooks/__tests__/useSuspenseQuery.test.tsx b/src/react/hooks/__tests__/useSuspenseQuery.test.tsx index 1f40289dc4d..31ea6583555 100644 --- a/src/react/hooks/__tests__/useSuspenseQuery.test.tsx +++ b/src/react/hooks/__tests__/useSuspenseQuery.test.tsx @@ -32,10 +32,7 @@ import { NetworkStatus, } from "@apollo/client"; import type { Incremental } from "@apollo/client/incremental"; -import { - 
Defer20220824Handler, - NotImplementedHandler, -} from "@apollo/client/incremental"; +import { NotImplementedHandler } from "@apollo/client/incremental"; import type { Unmasked } from "@apollo/client/masking"; import { ApolloProvider, @@ -50,7 +47,6 @@ import type { import { actAsync, createClientWrapper, - markAsStreaming, renderAsync, renderHookAsync, setupPaginatedCase, @@ -7137,2763 +7133,6 @@ describe("useSuspenseQuery", () => { expect(client.getObservableQueries().size).toBe(1); }); - it.each([ - "cache-first", - "network-only", - "no-cache", - "cache-and-network", - ])( - 'suspends deferred queries until initial chunk loads then streams in data as it loads when using a "%s" fetch policy', - async (fetchPolicy) => { - const query = gql` - query { - greeting { - message - ... on Greeting @defer { - recipient { - name - } - } - } - } - `; - - const link = new MockSubscriptionLink(); - - const { result, renders } = await renderSuspenseHook( - () => useSuspenseQuery(query, { fetchPolicy }), - { link, incrementalHandler: new Defer20220824Handler() } - ); - - expect(renders.suspenseCount).toBe(1); - - link.simulateResult({ - result: { - data: { - greeting: { message: "Hello world", __typename: "Greeting" }, - }, - hasNext: true, - }, - }); - - await waitFor(() => { - expect(result.current).toStrictEqualTyped({ - data: markAsStreaming({ - greeting: { message: "Hello world", __typename: "Greeting" }, - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: undefined, - }); - }); - - link.simulateResult( - { - result: { - incremental: [ - { - data: { - recipient: { name: "Alice", __typename: "Person" }, - __typename: "Greeting", - }, - path: ["greeting"], - }, - ], - hasNext: false, - }, - }, - true - ); - - await waitFor(() => { - expect(result.current).toStrictEqualTyped({ - data: { - greeting: { - __typename: "Greeting", - message: "Hello world", - recipient: { __typename: "Person", name: "Alice" }, - }, - }, - dataState: "complete", - networkStatus: NetworkStatus.ready, - error: undefined, - }); - }); - - expect(renders.count).toBe(3 + (IS_REACT_19 ? renders.suspenseCount : 0)); - expect(renders.suspenseCount).toBe(1); - expect(renders.frames).toStrictEqualTyped([ - { - data: markAsStreaming({ - greeting: { message: "Hello world", __typename: "Greeting" }, - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: undefined, - }, - { - data: { - greeting: { - __typename: "Greeting", - message: "Hello world", - recipient: { __typename: "Person", name: "Alice" }, - }, - }, - dataState: "complete", - networkStatus: NetworkStatus.ready, - error: undefined, - }, - ]); - } - ); - - it('does not suspend deferred queries with data in the cache and using a "cache-first" fetch policy', async () => { - const query = gql` - query { - greeting { - message - ... 
on Greeting @defer { - recipient { - name - } - } - } - } - `; - - const cache = new InMemoryCache(); - - cache.writeQuery({ - query, - data: { - greeting: { - __typename: "Greeting", - message: "Hello world", - recipient: { __typename: "Person", name: "Alice" }, - }, - }, - }); - - const { result, renders } = await renderSuspenseHook( - () => useSuspenseQuery(query, { fetchPolicy: "cache-first" }), - { cache, incrementalHandler: new Defer20220824Handler() } - ); - - expect(result.current).toStrictEqualTyped({ - data: { - greeting: { - message: "Hello world", - __typename: "Greeting", - recipient: { __typename: "Person", name: "Alice" }, - }, - }, - dataState: "complete", - networkStatus: NetworkStatus.ready, - error: undefined, - }); - - expect(renders.suspenseCount).toBe(0); - expect(renders.frames).toStrictEqualTyped([ - { - data: { - greeting: { - __typename: "Greeting", - message: "Hello world", - recipient: { __typename: "Person", name: "Alice" }, - }, - }, - dataState: "complete", - networkStatus: NetworkStatus.ready, - error: undefined, - }, - ]); - }); - - it('does not suspend deferred queries with partial data in the cache and using a "cache-first" fetch policy with `returnPartialData`', async () => { - const query = gql` - query { - greeting { - message - ... on Greeting @defer { - recipient { - name - } - } - } - } - `; - - const link = new MockSubscriptionLink(); - const cache = new InMemoryCache(); - - // We are intentionally writing partial data to the cache. Supress console - // warnings to avoid unnecessary noise in the test. - { - using _consoleSpy = spyOnConsole("error"); - cache.writeQuery({ - query, - data: { - greeting: { - __typename: "Greeting", - recipient: { __typename: "Person", name: "Cached Alice" }, - }, - }, - }); - } - - const { result, renders } = await renderSuspenseHook( - () => - useSuspenseQuery(query, { - fetchPolicy: "cache-first", - returnPartialData: true, - }), - { cache, link, incrementalHandler: new Defer20220824Handler() } - ); - - expect(result.current).toStrictEqualTyped({ - data: { - greeting: { - __typename: "Greeting", - recipient: { __typename: "Person", name: "Cached Alice" }, - }, - }, - dataState: "partial", - networkStatus: NetworkStatus.loading, - error: undefined, - }); - - link.simulateResult({ - result: { - data: { greeting: { message: "Hello world", __typename: "Greeting" } }, - hasNext: true, - }, - }); - - await waitFor(() => { - expect(result.current).toStrictEqualTyped({ - data: markAsStreaming({ - greeting: { - __typename: "Greeting", - message: "Hello world", - recipient: { __typename: "Person", name: "Cached Alice" }, - }, - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: undefined, - }); - }); - - link.simulateResult( - { - result: { - incremental: [ - { - data: { - __typename: "Greeting", - recipient: { name: "Alice", __typename: "Person" }, - }, - path: ["greeting"], - }, - ], - hasNext: false, - }, - }, - true - ); - - await waitFor(() => { - expect(result.current).toStrictEqualTyped({ - data: { - greeting: { - __typename: "Greeting", - message: "Hello world", - recipient: { __typename: "Person", name: "Alice" }, - }, - }, - dataState: "complete", - networkStatus: NetworkStatus.ready, - error: undefined, - }); - }); - - expect(renders.count).toBe(3 + (IS_REACT_19 ? 
renders.suspenseCount : 0)); - expect(renders.suspenseCount).toBe(0); - expect(renders.frames).toStrictEqualTyped([ - { - data: { - greeting: { - __typename: "Greeting", - recipient: { __typename: "Person", name: "Cached Alice" }, - }, - }, - dataState: "partial", - networkStatus: NetworkStatus.loading, - error: undefined, - }, - { - data: markAsStreaming({ - greeting: { - __typename: "Greeting", - message: "Hello world", - recipient: { __typename: "Person", name: "Cached Alice" }, - }, - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: undefined, - }, - { - data: { - greeting: { - __typename: "Greeting", - message: "Hello world", - recipient: { __typename: "Person", name: "Alice" }, - }, - }, - dataState: "complete", - networkStatus: NetworkStatus.ready, - error: undefined, - }, - ]); - }); - - it('does not suspend deferred queries with data in the cache and using a "cache-and-network" fetch policy', async () => { - const query = gql` - query { - greeting { - message - ... on Greeting @defer { - recipient { - name - } - } - } - } - `; - - const link = new MockSubscriptionLink(); - const cache = new InMemoryCache(); - const client = new ApolloClient({ - cache, - link, - incrementalHandler: new Defer20220824Handler(), - }); - - cache.writeQuery({ - query, - data: { - greeting: { - __typename: "Greeting", - message: "Hello cached", - recipient: { __typename: "Person", name: "Cached Alice" }, - }, - }, - }); - - const { result, renders } = await renderSuspenseHook( - () => useSuspenseQuery(query, { fetchPolicy: "cache-and-network" }), - { client } - ); - - expect(result.current).toStrictEqualTyped({ - data: { - greeting: { - message: "Hello cached", - __typename: "Greeting", - recipient: { __typename: "Person", name: "Cached Alice" }, - }, - }, - dataState: "complete", - networkStatus: NetworkStatus.loading, - error: undefined, - }); - - link.simulateResult({ - result: { - data: { greeting: { __typename: "Greeting", message: "Hello world" } }, - hasNext: true, - }, - }); - - await waitFor(() => { - expect(result.current).toStrictEqualTyped({ - data: markAsStreaming({ - greeting: { - __typename: "Greeting", - message: "Hello world", - recipient: { __typename: "Person", name: "Cached Alice" }, - }, - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: undefined, - }); - }); - - link.simulateResult( - { - result: { - incremental: [ - { - data: { - recipient: { name: "Alice", __typename: "Person" }, - __typename: "Greeting", - }, - path: ["greeting"], - }, - ], - hasNext: false, - }, - }, - true - ); - - await waitFor(() => { - expect(result.current).toStrictEqualTyped({ - data: { - greeting: { - __typename: "Greeting", - message: "Hello world", - recipient: { __typename: "Person", name: "Alice" }, - }, - }, - dataState: "complete", - networkStatus: NetworkStatus.ready, - error: undefined, - }); - }); - - expect(renders.count).toBe(3 + (IS_REACT_19 ? 
renders.suspenseCount : 0)); - expect(renders.suspenseCount).toBe(0); - expect(renders.frames).toStrictEqualTyped([ - { - data: { - greeting: { - __typename: "Greeting", - message: "Hello cached", - recipient: { __typename: "Person", name: "Cached Alice" }, - }, - }, - dataState: "complete", - networkStatus: NetworkStatus.loading, - error: undefined, - }, - { - data: markAsStreaming({ - greeting: { - __typename: "Greeting", - message: "Hello world", - recipient: { __typename: "Person", name: "Cached Alice" }, - }, - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: undefined, - }, - { - data: { - greeting: { - __typename: "Greeting", - message: "Hello world", - recipient: { __typename: "Person", name: "Alice" }, - }, - }, - dataState: "complete", - networkStatus: NetworkStatus.ready, - error: undefined, - }, - ]); - }); - - it("suspends deferred queries with lists and properly patches results", async () => { - const query = gql` - query { - greetings { - message - ... on Greeting @defer { - recipient { - name - } - } - } - } - `; - - const link = new MockSubscriptionLink(); - - const { result, renders } = await renderSuspenseHook( - () => useSuspenseQuery(query), - { link, incrementalHandler: new Defer20220824Handler() } - ); - - expect(renders.suspenseCount).toBe(1); - - link.simulateResult({ - result: { - data: { - greetings: [ - { __typename: "Greeting", message: "Hello world" }, - { __typename: "Greeting", message: "Hello again" }, - ], - }, - hasNext: true, - }, - }); - - await waitFor(() => { - expect(result.current).toStrictEqualTyped({ - data: markAsStreaming({ - greetings: [ - { __typename: "Greeting", message: "Hello world" }, - { __typename: "Greeting", message: "Hello again" }, - ], - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: undefined, - }); - }); - - link.simulateResult({ - result: { - incremental: [ - { - data: { - __typename: "Greeting", - recipient: { __typename: "Person", name: "Alice" }, - }, - path: ["greetings", 0], - }, - ], - hasNext: true, - }, - }); - - await waitFor(() => { - expect(result.current).toStrictEqualTyped({ - data: markAsStreaming({ - greetings: [ - { - __typename: "Greeting", - message: "Hello world", - recipient: { __typename: "Person", name: "Alice" }, - }, - { - __typename: "Greeting", - message: "Hello again", - }, - ], - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: undefined, - }); - }); - - link.simulateResult( - { - result: { - incremental: [ - { - data: { - __typename: "Greeting", - recipient: { __typename: "Person", name: "Bob" }, - }, - path: ["greetings", 1], - }, - ], - hasNext: false, - }, - }, - true - ); - - await waitFor(() => { - expect(result.current).toStrictEqualTyped({ - data: { - greetings: [ - { - __typename: "Greeting", - message: "Hello world", - recipient: { __typename: "Person", name: "Alice" }, - }, - { - __typename: "Greeting", - message: "Hello again", - recipient: { __typename: "Person", name: "Bob" }, - }, - ], - }, - dataState: "complete", - networkStatus: NetworkStatus.ready, - error: undefined, - }); - }); - - expect(renders.count).toBe(4 + (IS_REACT_19 ? 
renders.suspenseCount : 0)); - expect(renders.suspenseCount).toBe(1); - expect(renders.frames).toStrictEqualTyped([ - { - data: markAsStreaming({ - greetings: [ - { __typename: "Greeting", message: "Hello world" }, - { __typename: "Greeting", message: "Hello again" }, - ], - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: undefined, - }, - { - data: markAsStreaming({ - greetings: [ - { - __typename: "Greeting", - message: "Hello world", - recipient: { __typename: "Person", name: "Alice" }, - }, - { - __typename: "Greeting", - message: "Hello again", - }, - ], - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: undefined, - }, - { - data: { - greetings: [ - { - __typename: "Greeting", - message: "Hello world", - recipient: { __typename: "Person", name: "Alice" }, - }, - { - __typename: "Greeting", - message: "Hello again", - recipient: { __typename: "Person", name: "Bob" }, - }, - ], - }, - dataState: "complete", - networkStatus: NetworkStatus.ready, - error: undefined, - }, - ]); - }); - - it("suspends queries with deferred fragments in lists and properly merges arrays", async () => { - const query = gql` - query DeferVariation { - allProducts { - delivery { - ...MyFragment @defer - } - sku - id - } - } - - fragment MyFragment on DeliveryEstimates { - estimatedDelivery - fastestDelivery - } - `; - - const link = new MockSubscriptionLink(); - - const { result, renders } = await renderSuspenseHook( - () => useSuspenseQuery(query), - { link, incrementalHandler: new Defer20220824Handler() } - ); - - expect(renders.suspenseCount).toBe(1); - - link.simulateResult({ - result: { - data: { - allProducts: [ - { - __typename: "Product", - delivery: { - __typename: "DeliveryEstimates", - }, - id: "apollo-federation", - sku: "federation", - }, - { - __typename: "Product", - delivery: { - __typename: "DeliveryEstimates", - }, - id: "apollo-studio", - sku: "studio", - }, - ], - }, - hasNext: true, - }, - }); - - await waitFor(() => { - expect(result.current).toStrictEqualTyped({ - data: markAsStreaming({ - allProducts: [ - { - __typename: "Product", - delivery: { - __typename: "DeliveryEstimates", - }, - id: "apollo-federation", - sku: "federation", - }, - { - __typename: "Product", - delivery: { - __typename: "DeliveryEstimates", - }, - id: "apollo-studio", - sku: "studio", - }, - ], - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: undefined, - }); - }); - - link.simulateResult({ - result: { - hasNext: true, - incremental: [ - { - data: { - __typename: "DeliveryEstimates", - estimatedDelivery: "6/25/2021", - fastestDelivery: "6/24/2021", - }, - path: ["allProducts", 0, "delivery"], - }, - { - data: { - __typename: "DeliveryEstimates", - estimatedDelivery: "6/25/2021", - fastestDelivery: "6/24/2021", - }, - path: ["allProducts", 1, "delivery"], - }, - ], - }, - }); - - await waitFor(() => { - expect(result.current).toStrictEqualTyped({ - data: markAsStreaming({ - allProducts: [ - { - __typename: "Product", - delivery: { - __typename: "DeliveryEstimates", - estimatedDelivery: "6/25/2021", - fastestDelivery: "6/24/2021", - }, - id: "apollo-federation", - sku: "federation", - }, - { - __typename: "Product", - delivery: { - __typename: "DeliveryEstimates", - estimatedDelivery: "6/25/2021", - fastestDelivery: "6/24/2021", - }, - id: "apollo-studio", - sku: "studio", - }, - ], - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: undefined, - }); - }); - }); - - it("incrementally 
rerenders data returned by a `refetch` for a deferred query", async () => { - const query = gql` - query { - greeting { - message - ... @defer { - recipient { - name - } - } - } - } - `; - - const cache = new InMemoryCache(); - const link = new MockSubscriptionLink(); - const client = new ApolloClient({ - link, - cache, - incrementalHandler: new Defer20220824Handler(), - }); - - const { result, renders } = await renderSuspenseHook( - () => useSuspenseQuery(query), - { client } - ); - - link.simulateResult({ - result: { - data: { greeting: { __typename: "Greeting", message: "Hello world" } }, - hasNext: true, - }, - }); - - await waitFor(() => { - expect(result.current).toStrictEqualTyped({ - data: markAsStreaming({ - greeting: { - __typename: "Greeting", - message: "Hello world", - }, - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: undefined, - }); - }); - - link.simulateResult( - { - result: { - incremental: [ - { - data: { - recipient: { name: "Alice", __typename: "Person" }, - }, - path: ["greeting"], - }, - ], - hasNext: false, - }, - }, - true - ); - - await waitFor(() => { - expect(result.current).toStrictEqualTyped({ - data: { - greeting: { - __typename: "Greeting", - message: "Hello world", - recipient: { - __typename: "Person", - name: "Alice", - }, - }, - }, - dataState: "complete", - networkStatus: NetworkStatus.ready, - error: undefined, - }); - }); - - let refetchPromise: Promise>; - await actAsync(async () => { - refetchPromise = result.current.refetch(); - }); - - link.simulateResult({ - result: { - data: { - greeting: { - __typename: "Greeting", - message: "Goodbye", - }, - }, - hasNext: true, - }, - }); - - await waitFor(() => { - expect(result.current).toStrictEqualTyped({ - data: markAsStreaming({ - greeting: { - __typename: "Greeting", - message: "Goodbye", - recipient: { - __typename: "Person", - name: "Alice", - }, - }, - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: undefined, - }); - }); - - link.simulateResult( - { - result: { - incremental: [ - { - data: { - recipient: { name: "Bob", __typename: "Person" }, - }, - path: ["greeting"], - }, - ], - hasNext: false, - }, - }, - true - ); - - await waitFor(() => { - expect(result.current).toStrictEqualTyped({ - data: { - greeting: { - __typename: "Greeting", - message: "Goodbye", - recipient: { - __typename: "Person", - name: "Bob", - }, - }, - }, - dataState: "complete", - networkStatus: NetworkStatus.ready, - error: undefined, - }); - }); - - await expect(refetchPromise!).resolves.toStrictEqualTyped({ - data: { - greeting: { - __typename: "Greeting", - message: "Goodbye", - recipient: { - __typename: "Person", - name: "Bob", - }, - }, - }, - }); - - expect(renders.count).toBe(6 + (IS_REACT_19 ? 
renders.suspenseCount : 0)); - expect(renders.suspenseCount).toBe(2); - expect(renders.frames).toStrictEqualTyped([ - { - data: markAsStreaming({ - greeting: { - __typename: "Greeting", - message: "Hello world", - }, - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: undefined, - }, - { - data: { - greeting: { - __typename: "Greeting", - message: "Hello world", - recipient: { - __typename: "Person", - name: "Alice", - }, - }, - }, - dataState: "complete", - networkStatus: NetworkStatus.ready, - error: undefined, - }, - { - data: markAsStreaming({ - greeting: { - __typename: "Greeting", - message: "Goodbye", - recipient: { - __typename: "Person", - name: "Alice", - }, - }, - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: undefined, - }, - { - data: { - greeting: { - __typename: "Greeting", - message: "Goodbye", - recipient: { - __typename: "Person", - name: "Bob", - }, - }, - }, - dataState: "complete", - networkStatus: NetworkStatus.ready, - error: undefined, - }, - ]); - }); - - it("incrementally renders data returned after skipping a deferred query", async () => { - const query = gql` - query { - greeting { - message - ... @defer { - recipient { - name - } - } - } - } - `; - - const cache = new InMemoryCache(); - const link = new MockSubscriptionLink(); - const client = new ApolloClient({ - link, - cache, - incrementalHandler: new Defer20220824Handler(), - }); - - const { result, rerenderAsync, renders } = await renderSuspenseHook( - ({ skip }) => useSuspenseQuery(query, { skip }), - { client, initialProps: { skip: true } } - ); - - expect(result.current).toStrictEqualTyped({ - data: undefined, - dataState: "empty", - networkStatus: NetworkStatus.ready, - error: undefined, - }); - - await rerenderAsync({ skip: false }); - - expect(renders.suspenseCount).toBe(1); - - link.simulateResult({ - result: { - data: { greeting: { __typename: "Greeting", message: "Hello world" } }, - hasNext: true, - }, - }); - - await waitFor(() => { - expect(result.current).toStrictEqualTyped({ - data: markAsStreaming({ - greeting: { - __typename: "Greeting", - message: "Hello world", - }, - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: undefined, - }); - }); - - link.simulateResult( - { - result: { - incremental: [ - { - data: { - recipient: { name: "Alice", __typename: "Person" }, - }, - path: ["greeting"], - }, - ], - hasNext: false, - }, - }, - true - ); - - await waitFor(() => { - expect(result.current).toStrictEqualTyped({ - data: { - greeting: { - __typename: "Greeting", - message: "Hello world", - recipient: { - __typename: "Person", - name: "Alice", - }, - }, - }, - dataState: "complete", - networkStatus: NetworkStatus.ready, - error: undefined, - }); - }); - - expect(renders.count).toBe(4 + (IS_REACT_19 ? 
renders.suspenseCount : 0)); - expect(renders.suspenseCount).toBe(1); - expect(renders.frames).toStrictEqualTyped([ - { - data: undefined, - dataState: "empty", - networkStatus: NetworkStatus.ready, - error: undefined, - }, - { - data: markAsStreaming({ - greeting: { - __typename: "Greeting", - message: "Hello world", - }, - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: undefined, - }, - { - data: { - greeting: { - __typename: "Greeting", - message: "Hello world", - recipient: { - __typename: "Person", - name: "Alice", - }, - }, - }, - dataState: "complete", - networkStatus: NetworkStatus.ready, - error: undefined, - }, - ]); - }); - - // TODO: This test is a bit of a lie. `fetchMore` should incrementally - // rerender when using `@defer` but there is currently a bug in the core - // implementation that prevents updates until the final result is returned. - // This test reflects the behavior as it exists today, but will need - // to be updated once the core bug is fixed. - // - // NOTE: A duplicate it.failng test has been added right below this one with - // the expected behavior added in (i.e. the commented code in this test). Once - // the core bug is fixed, this test can be removed in favor of the other test. - // - // https://github.com/apollographql/apollo-client/issues/11034 - it("rerenders data returned by `fetchMore` for a deferred query", async () => { - const query = gql` - query ($offset: Int) { - greetings(offset: $offset) { - message - ... @defer { - recipient { - name - } - } - } - } - `; - - const cache = new InMemoryCache({ - typePolicies: { - Query: { - fields: { - greetings: offsetLimitPagination(), - }, - }, - }, - }); - const link = new MockSubscriptionLink(); - const client = new ApolloClient({ - link, - cache, - incrementalHandler: new Defer20220824Handler(), - }); - - const { result, renders } = await renderSuspenseHook( - () => useSuspenseQuery(query, { variables: { offset: 0 } }), - { client } - ); - - link.simulateResult({ - result: { - data: { - greetings: [{ __typename: "Greeting", message: "Hello world" }], - }, - hasNext: true, - }, - }); - - await waitFor(() => { - expect(result.current).toStrictEqualTyped({ - data: markAsStreaming({ - greetings: [ - { - __typename: "Greeting", - message: "Hello world", - }, - ], - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: undefined, - }); - }); - - link.simulateResult( - { - result: { - incremental: [ - { - data: { - recipient: { name: "Alice", __typename: "Person" }, - }, - path: ["greetings", 0], - }, - ], - hasNext: false, - }, - }, - true - ); - - await waitFor(() => { - expect(result.current).toStrictEqualTyped({ - data: { - greetings: [ - { - __typename: "Greeting", - message: "Hello world", - recipient: { - __typename: "Person", - name: "Alice", - }, - }, - ], - }, - dataState: "complete", - networkStatus: NetworkStatus.ready, - error: undefined, - }); - }); - - let fetchMorePromise: Promise>; - await actAsync(() => { - fetchMorePromise = result.current.fetchMore({ variables: { offset: 1 } }); - }); - - link.simulateResult({ - result: { - data: { - greetings: [ - { - __typename: "Greeting", - message: "Goodbye", - }, - ], - }, - hasNext: true, - }, - }); - - // TODO: Re-enable once the core bug is fixed - // await waitFor(() => { - // expect(result.current).toStrictEqualTyped({ - // data: { - // greetings: [ - // { - // __typename: 'Greeting', - // message: 'Hello world', - // recipient: { - // __typename: 'Person', - // name: 'Alice', - // }, - 
// }, - // { - // __typename: 'Greeting', - // message: 'Goodbye', - // }, - // ], - // }, - // dataState: "streaming", - // networkStatus: NetworkStatus.streaming, - // error: undefined, - // }); - // }); - - link.simulateResult( - { - result: { - incremental: [ - { - data: { - recipient: { name: "Bob", __typename: "Person" }, - }, - path: ["greetings", 0], - }, - ], - hasNext: false, - }, - }, - true - ); - - await waitFor(() => { - expect(result.current).toStrictEqualTyped({ - data: { - greetings: [ - { - __typename: "Greeting", - message: "Hello world", - recipient: { - __typename: "Person", - name: "Alice", - }, - }, - { - __typename: "Greeting", - message: "Goodbye", - recipient: { - __typename: "Person", - name: "Bob", - }, - }, - ], - }, - dataState: "complete", - networkStatus: NetworkStatus.ready, - error: undefined, - }); - }); - - await expect(fetchMorePromise!).resolves.toStrictEqualTyped({ - data: { - greetings: [ - { - __typename: "Greeting", - message: "Goodbye", - recipient: { - __typename: "Person", - name: "Bob", - }, - }, - ], - }, - }); - - expect(renders.count).toBe(5 + (IS_REACT_19 ? renders.suspenseCount : 0)); - expect(renders.suspenseCount).toBe(2); - expect(renders.frames).toStrictEqualTyped([ - { - data: markAsStreaming({ - greetings: [ - { - __typename: "Greeting", - message: "Hello world", - }, - ], - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: undefined, - }, - { - data: { - greetings: [ - { - __typename: "Greeting", - message: "Hello world", - recipient: { - __typename: "Person", - name: "Alice", - }, - }, - ], - }, - dataState: "complete", - networkStatus: NetworkStatus.ready, - error: undefined, - }, - // TODO: Re-enable when the core `fetchMore` bug is fixed - // { - // data: { - // greetings: [ - // { - // __typename: 'Greeting', - // message: 'Hello world', - // recipient: { - // __typename: 'Person', - // name: 'Alice', - // }, - // }, - // { - // __typename: 'Greeting', - // message: 'Goodbye', - // }, - // ], - // }, - // dataState: "streaming", - // networkStatus: NetworkStatus.streaming, - // error: undefined, - // }, - { - data: { - greetings: [ - { - __typename: "Greeting", - message: "Hello world", - recipient: { - __typename: "Person", - name: "Alice", - }, - }, - { - __typename: "Greeting", - message: "Goodbye", - recipient: { - __typename: "Person", - name: "Bob", - }, - }, - ], - }, - dataState: "complete", - networkStatus: NetworkStatus.ready, - error: undefined, - }, - ]); - }); - - // TODO: This is a duplicate of the test above, but with the expected behavior - // added (hence the `it.failing`). Remove the previous test once issue #11034 - // is fixed. - // - // https://github.com/apollographql/apollo-client/issues/11034 - it.failing( - "incrementally rerenders data returned by a `fetchMore` for a deferred query", - async () => { - const query = gql` - query ($offset: Int) { - greetings(offset: $offset) { - message - ... 
@defer { - recipient { - name - } - } - } - } - `; - - const cache = new InMemoryCache({ - typePolicies: { - Query: { - fields: { - greetings: offsetLimitPagination(), - }, - }, - }, - }); - const link = new MockSubscriptionLink(); - const client = new ApolloClient({ - link, - cache, - incrementalHandler: new Defer20220824Handler(), - }); - - const { result, renders } = await renderSuspenseHook( - () => useSuspenseQuery(query, { variables: { offset: 0 } }), - { client } - ); - - link.simulateResult({ - result: { - data: { - greetings: [{ __typename: "Greeting", message: "Hello world" }], - }, - hasNext: true, - }, - }); - - await waitFor(() => { - expect(result.current).toStrictEqualTyped({ - data: markAsStreaming({ - greetings: [ - { - __typename: "Greeting", - message: "Hello world", - }, - ], - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: undefined, - }); - }); - - link.simulateResult( - { - result: { - incremental: [ - { - data: { - recipient: { name: "Alice", __typename: "Person" }, - }, - path: ["greetings", 0], - }, - ], - hasNext: false, - }, - }, - true - ); - - await waitFor(() => { - expect(result.current).toStrictEqualTyped({ - data: { - greetings: [ - { - __typename: "Greeting", - message: "Hello world", - recipient: { - __typename: "Person", - name: "Alice", - }, - }, - ], - }, - dataState: "complete", - networkStatus: NetworkStatus.ready, - error: undefined, - }); - }); - - let fetchMorePromise: Promise>; - await actAsync(() => { - fetchMorePromise = result.current.fetchMore({ - variables: { offset: 1 }, - }); - }); - - link.simulateResult({ - result: { - data: { - greetings: [ - { - __typename: "Greeting", - message: "Goodbye", - }, - ], - }, - hasNext: true, - }, - }); - - await waitFor(() => { - expect(result.current).toStrictEqualTyped({ - data: markAsStreaming({ - greetings: [ - { - __typename: "Greeting", - message: "Hello world", - recipient: { - __typename: "Person", - name: "Alice", - }, - }, - { - __typename: "Greeting", - message: "Goodbye", - }, - ], - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: undefined, - }); - }); - - link.simulateResult( - { - result: { - incremental: [ - { - data: { - recipient: { name: "Bob", __typename: "Person" }, - }, - path: ["greetings", 0], - }, - ], - hasNext: false, - }, - }, - true - ); - - await waitFor(() => { - expect(result.current).toStrictEqualTyped({ - data: { - greetings: [ - { - __typename: "Greeting", - message: "Hello world", - recipient: { - __typename: "Person", - name: "Alice", - }, - }, - { - __typename: "Greeting", - message: "Goodbye", - recipient: { - __typename: "Person", - name: "Bob", - }, - }, - ], - }, - dataState: "complete", - networkStatus: NetworkStatus.ready, - error: undefined, - }); - }); - - await expect(fetchMorePromise!).resolves.toEqual({ - data: { - greetings: [ - { - __typename: "Greeting", - message: "Goodbye", - recipient: { - __typename: "Person", - name: "Bob", - }, - }, - ], - }, - loading: false, - networkStatus: NetworkStatus.ready, - error: undefined, - }); - - expect(renders.count).toBe(5 + (IS_REACT_19 ? 
renders.suspenseCount : 0)); - expect(renders.suspenseCount).toBe(2); - expect(renders.frames).toStrictEqualTyped([ - { - data: markAsStreaming({ - greetings: [ - { - __typename: "Greeting", - message: "Hello world", - }, - ], - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: undefined, - }, - { - data: { - greetings: [ - { - __typename: "Greeting", - message: "Hello world", - recipient: { - __typename: "Person", - name: "Alice", - }, - }, - ], - }, - dataState: "complete", - networkStatus: NetworkStatus.ready, - error: undefined, - }, - { - data: markAsStreaming({ - greetings: [ - { - __typename: "Greeting", - message: "Hello world", - recipient: { - __typename: "Person", - name: "Alice", - }, - }, - { - __typename: "Greeting", - message: "Goodbye", - }, - ], - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: undefined, - }, - { - data: { - greetings: [ - { - __typename: "Greeting", - message: "Hello world", - recipient: { - __typename: "Person", - name: "Alice", - }, - }, - { - __typename: "Greeting", - message: "Goodbye", - recipient: { - __typename: "Person", - name: "Bob", - }, - }, - ], - }, - dataState: "complete", - networkStatus: NetworkStatus.ready, - error: undefined, - }, - ]); - } - ); - - it("throws network errors returned by deferred queries", async () => { - using _consoleSpy = spyOnConsole("error"); - - const query = gql` - query { - greeting { - message - ... on Greeting @defer { - recipient { - name - } - } - } - } - `; - - const link = new MockSubscriptionLink(); - - const { renders } = await renderSuspenseHook( - () => useSuspenseQuery(query), - { - link, - incrementalHandler: new Defer20220824Handler(), - } - ); - - link.simulateResult({ - error: new Error("Could not fetch"), - }); - - await waitFor(() => expect(renders.errorCount).toBe(1)); - - expect(renders.errors.length).toBe(1); - expect(renders.suspenseCount).toBe(1); - expect(renders.frames).toEqual([]); - - const [error] = renders.errors; - - expect(error).toBeInstanceOf(Error); - expect(error).toEqual(new Error("Could not fetch")); - }); - - it("throws graphql errors returned by deferred queries", async () => { - using _consoleSpy = spyOnConsole("error"); - - const query = gql` - query { - greeting { - message - ... on Greeting @defer { - recipient { - name - } - } - } - } - `; - - const link = new MockSubscriptionLink(); - - const { renders } = await renderSuspenseHook( - () => useSuspenseQuery(query), - { - link, - incrementalHandler: new Defer20220824Handler(), - } - ); - - link.simulateResult({ - result: { - errors: [new GraphQLError("Could not fetch greeting")], - }, - }); - - await waitFor(() => expect(renders.errorCount).toBe(1)); - - expect(renders.errors.length).toBe(1); - expect(renders.suspenseCount).toBe(1); - expect(renders.frames).toEqual([]); - - const [error] = renders.errors; - - expect(error).toBeInstanceOf(CombinedGraphQLErrors); - expect(error).toEqual( - new CombinedGraphQLErrors({ - errors: [{ message: "Could not fetch greeting" }], - }) - ); - }); - - it("throws errors returned by deferred queries that include partial data", async () => { - using _consoleSpy = spyOnConsole("error"); - - const query = gql` - query { - greeting { - message - ... 
on Greeting @defer { - recipient { - name - } - } - } - } - `; - - const link = new MockSubscriptionLink(); - - const { renders } = await renderSuspenseHook( - () => useSuspenseQuery(query), - { - link, - incrementalHandler: new Defer20220824Handler(), - } - ); - - link.simulateResult({ - result: { - data: { greeting: null }, - errors: [new GraphQLError("Could not fetch greeting")], - }, - }); - - await waitFor(() => expect(renders.errorCount).toBe(1)); - - expect(renders.errors.length).toBe(1); - expect(renders.suspenseCount).toBe(1); - expect(renders.frames).toEqual([]); - - const [error] = renders.errors; - - expect(error).toBeInstanceOf(CombinedGraphQLErrors); - expect(error).toEqual( - new CombinedGraphQLErrors({ - data: { greeting: null }, - errors: [{ message: "Could not fetch greeting" }], - }) - ); - }); - - it("discards partial data and throws errors returned in incremental chunks", async () => { - using _consoleSpy = spyOnConsole("error"); - - const query = gql` - query { - hero { - name - heroFriends { - id - name - ... @defer { - homeWorld - } - } - } - } - `; - - const link = new MockSubscriptionLink(); - - const { result, renders } = await renderSuspenseHook( - () => useSuspenseQuery(query), - { link, incrementalHandler: new Defer20220824Handler() } - ); - - link.simulateResult({ - result: { - data: { - hero: { - name: "R2-D2", - heroFriends: [ - { - id: "1000", - name: "Luke Skywalker", - }, - { - id: "1003", - name: "Leia Organa", - }, - ], - }, - }, - hasNext: true, - }, - }); - - await waitFor(() => { - expect(result.current).toStrictEqualTyped({ - data: markAsStreaming({ - hero: { - heroFriends: [ - { - id: "1000", - name: "Luke Skywalker", - }, - { - id: "1003", - name: "Leia Organa", - }, - ], - name: "R2-D2", - }, - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: undefined, - }); - }); - - link.simulateResult( - { - result: { - incremental: [ - { - path: ["hero", "heroFriends", 0], - errors: [ - new GraphQLError( - "homeWorld for character with ID 1000 could not be fetched.", - { path: ["hero", "heroFriends", 0, "homeWorld"] } - ), - ], - data: { - homeWorld: null, - }, - }, - // This chunk is ignored since errorPolicy `none` throws away partial - // data - { - path: ["hero", "heroFriends", 1], - data: { - homeWorld: "Alderaan", - }, - }, - ], - hasNext: false, - }, - }, - true - ); - - await waitFor(() => { - expect(renders.errorCount).toBe(1); - }); - - expect(renders.suspenseCount).toBe(1); - expect(renders.frames).toStrictEqualTyped([ - { - data: markAsStreaming({ - hero: { - heroFriends: [ - { - id: "1000", - name: "Luke Skywalker", - }, - { - id: "1003", - name: "Leia Organa", - }, - ], - name: "R2-D2", - }, - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: undefined, - }, - ]); - - const [error] = renders.errors; - - expect(error).toBeInstanceOf(CombinedGraphQLErrors); - expect(error).toEqual( - new CombinedGraphQLErrors({ - data: { - hero: { - heroFriends: [ - { - id: "1000", - name: "Luke Skywalker", - homeWorld: null, - }, - { - id: "1003", - name: "Leia Organa", - homeWorld: "Alderaan", - }, - ], - name: "R2-D2", - }, - }, - errors: [ - { - message: - "homeWorld for character with ID 1000 could not be fetched.", - path: ["hero", "heroFriends", 0, "homeWorld"], - }, - ], - }) - ); - }); - - it("adds partial data and does not throw errors returned in incremental chunks but returns them in `error` property with errorPolicy set to `all`", async () => { - const query = gql` - query { - hero { - 
name - heroFriends { - id - name - ... @defer { - homeWorld - } - } - } - } - `; - - const link = new MockSubscriptionLink(); - - const { result, renders } = await renderSuspenseHook( - () => useSuspenseQuery(query, { errorPolicy: "all" }), - { link, incrementalHandler: new Defer20220824Handler() } - ); - - link.simulateResult({ - result: { - data: { - hero: { - name: "R2-D2", - heroFriends: [ - { - id: "1000", - name: "Luke Skywalker", - }, - { - id: "1003", - name: "Leia Organa", - }, - ], - }, - }, - hasNext: true, - }, - }); - - await waitFor(() => { - expect(result.current).toStrictEqualTyped({ - data: markAsStreaming({ - hero: { - heroFriends: [ - { - id: "1000", - name: "Luke Skywalker", - }, - { - id: "1003", - name: "Leia Organa", - }, - ], - name: "R2-D2", - }, - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: undefined, - }); - }); - - link.simulateResult( - { - result: { - incremental: [ - { - path: ["hero", "heroFriends", 0], - errors: [ - new GraphQLError( - "homeWorld for character with ID 1000 could not be fetched.", - { path: ["hero", "heroFriends", 0, "homeWorld"] } - ), - ], - data: { - homeWorld: null, - }, - }, - // Unlike the default (errorPolicy = `none`), this data will be - // added to the final result - { - path: ["hero", "heroFriends", 1], - data: { - homeWorld: "Alderaan", - }, - }, - ], - hasNext: false, - }, - }, - true - ); - - await waitFor(() => { - expect(result.current).toStrictEqualTyped({ - data: { - hero: { - heroFriends: [ - { - id: "1000", - name: "Luke Skywalker", - homeWorld: null, - }, - { - id: "1003", - name: "Leia Organa", - homeWorld: "Alderaan", - }, - ], - name: "R2-D2", - }, - }, - dataState: "complete", - networkStatus: NetworkStatus.error, - error: new CombinedGraphQLErrors({ - data: { - hero: { - heroFriends: [ - { - id: "1000", - name: "Luke Skywalker", - homeWorld: null, - }, - { - id: "1003", - name: "Leia Organa", - homeWorld: "Alderaan", - }, - ], - name: "R2-D2", - }, - }, - errors: [ - { - message: - "homeWorld for character with ID 1000 could not be fetched.", - path: ["hero", "heroFriends", 0, "homeWorld"], - }, - ], - }), - }); - }); - - expect(renders.count).toBe(3 + (IS_REACT_19 ? renders.suspenseCount : 0)); - expect(renders.suspenseCount).toBe(1); - expect(renders.frames).toStrictEqualTyped([ - { - data: markAsStreaming({ - hero: { - heroFriends: [ - { - id: "1000", - name: "Luke Skywalker", - }, - { - id: "1003", - name: "Leia Organa", - }, - ], - name: "R2-D2", - }, - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: undefined, - }, - { - data: { - hero: { - heroFriends: [ - { - id: "1000", - name: "Luke Skywalker", - homeWorld: null, - }, - { - id: "1003", - name: "Leia Organa", - homeWorld: "Alderaan", - }, - ], - name: "R2-D2", - }, - }, - dataState: "complete", - networkStatus: NetworkStatus.error, - error: new CombinedGraphQLErrors({ - data: { - hero: { - heroFriends: [ - { - id: "1000", - name: "Luke Skywalker", - homeWorld: null, - }, - { - id: "1003", - name: "Leia Organa", - homeWorld: "Alderaan", - }, - ], - name: "R2-D2", - }, - }, - errors: [ - { - message: - "homeWorld for character with ID 1000 could not be fetched.", - path: ["hero", "heroFriends", 0, "homeWorld"], - }, - ], - }), - }, - ]); - }); - - it("adds partial data and discards errors returned in incremental chunks with errorPolicy set to `ignore`", async () => { - const query = gql` - query { - hero { - name - heroFriends { - id - name - ... 
@defer { - homeWorld - } - } - } - } - `; - - const link = new MockSubscriptionLink(); - - const { result, renders } = await renderSuspenseHook( - () => useSuspenseQuery(query, { errorPolicy: "ignore" }), - { link, incrementalHandler: new Defer20220824Handler() } - ); - - link.simulateResult({ - result: { - data: { - hero: { - name: "R2-D2", - heroFriends: [ - { - id: "1000", - name: "Luke Skywalker", - }, - { - id: "1003", - name: "Leia Organa", - }, - ], - }, - }, - hasNext: true, - }, - }); - - await waitFor(() => { - expect(result.current).toStrictEqualTyped({ - data: markAsStreaming({ - hero: { - heroFriends: [ - { - id: "1000", - name: "Luke Skywalker", - }, - { - id: "1003", - name: "Leia Organa", - }, - ], - name: "R2-D2", - }, - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: undefined, - }); - }); - - link.simulateResult( - { - result: { - incremental: [ - { - path: ["hero", "heroFriends", 0], - errors: [ - new GraphQLError( - "homeWorld for character with ID 1000 could not be fetched.", - { path: ["hero", "heroFriends", 0, "homeWorld"] } - ), - ], - data: { - homeWorld: null, - }, - }, - { - path: ["hero", "heroFriends", 1], - data: { - homeWorld: "Alderaan", - }, - }, - ], - hasNext: false, - }, - }, - true - ); - - await waitFor(() => { - expect(result.current).toStrictEqualTyped({ - data: { - hero: { - heroFriends: [ - { - id: "1000", - name: "Luke Skywalker", - homeWorld: null, - }, - { - id: "1003", - name: "Leia Organa", - homeWorld: "Alderaan", - }, - ], - name: "R2-D2", - }, - }, - dataState: "complete", - networkStatus: NetworkStatus.ready, - error: undefined, - }); - }); - - expect(renders.count).toBe(3 + (IS_REACT_19 ? renders.suspenseCount : 0)); - expect(renders.suspenseCount).toBe(1); - expect(renders.frames).toStrictEqualTyped([ - { - data: markAsStreaming({ - hero: { - heroFriends: [ - { - id: "1000", - name: "Luke Skywalker", - }, - { - id: "1003", - name: "Leia Organa", - }, - ], - name: "R2-D2", - }, - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: undefined, - }, - { - data: { - hero: { - heroFriends: [ - { - id: "1000", - name: "Luke Skywalker", - homeWorld: null, - }, - { - id: "1003", - name: "Leia Organa", - homeWorld: "Alderaan", - }, - ], - name: "R2-D2", - }, - }, - dataState: "complete", - networkStatus: NetworkStatus.ready, - error: undefined, - }, - ]); - }); - - it("can refetch and respond to cache updates after encountering an error in an incremental chunk for a deferred query when `errorPolicy` is `all`", async () => { - const query = gql` - query { - hero { - name - heroFriends { - id - name - ... 
@defer { - homeWorld - } - } - } - } - `; - - const cache = new InMemoryCache(); - const link = new MockSubscriptionLink(); - const client = new ApolloClient({ - link, - cache, - incrementalHandler: new Defer20220824Handler(), - }); - - const { result, renders } = await renderSuspenseHook( - () => useSuspenseQuery(query, { errorPolicy: "all" }), - { client } - ); - - link.simulateResult({ - result: { - data: { - hero: { - name: "R2-D2", - heroFriends: [ - { id: "1000", name: "Luke Skywalker" }, - { id: "1003", name: "Leia Organa" }, - ], - }, - }, - hasNext: true, - }, - }); - - await waitFor(() => { - expect(result.current).toStrictEqualTyped({ - data: markAsStreaming({ - hero: { - heroFriends: [ - { id: "1000", name: "Luke Skywalker" }, - { id: "1003", name: "Leia Organa" }, - ], - name: "R2-D2", - }, - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: undefined, - }); - }); - - link.simulateResult( - { - result: { - incremental: [ - { - path: ["hero", "heroFriends", 0], - errors: [ - new GraphQLError( - "homeWorld for character with ID 1000 could not be fetched.", - { path: ["hero", "heroFriends", 0, "homeWorld"] } - ), - ], - data: { - homeWorld: null, - }, - }, - { - path: ["hero", "heroFriends", 1], - data: { - homeWorld: "Alderaan", - }, - }, - ], - hasNext: false, - }, - }, - true - ); - - await waitFor(() => { - expect(result.current).toStrictEqualTyped({ - data: { - hero: { - heroFriends: [ - { id: "1000", name: "Luke Skywalker", homeWorld: null }, - { id: "1003", name: "Leia Organa", homeWorld: "Alderaan" }, - ], - name: "R2-D2", - }, - }, - dataState: "complete", - networkStatus: NetworkStatus.error, - error: new CombinedGraphQLErrors({ - data: { - hero: { - heroFriends: [ - { id: "1000", name: "Luke Skywalker", homeWorld: null }, - { id: "1003", name: "Leia Organa", homeWorld: "Alderaan" }, - ], - name: "R2-D2", - }, - }, - errors: [ - { - message: - "homeWorld for character with ID 1000 could not be fetched.", - path: ["hero", "heroFriends", 0, "homeWorld"], - }, - ], - }), - }); - }); - - let refetchPromise: Promise>; - await actAsync(async () => { - refetchPromise = result.current.refetch(); - }); - - link.simulateResult({ - result: { - data: { - hero: { - name: "R2-D2", - heroFriends: [ - { id: "1000", name: "Luke Skywalker" }, - { id: "1003", name: "Leia Organa" }, - ], - }, - }, - hasNext: true, - }, - }); - - await waitFor(() => { - expect(result.current).toStrictEqualTyped({ - data: markAsStreaming({ - hero: { - heroFriends: [ - { id: "1000", name: "Luke Skywalker", homeWorld: null }, - { id: "1003", name: "Leia Organa", homeWorld: "Alderaan" }, - ], - name: "R2-D2", - }, - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: undefined, - }); - }); - - link.simulateResult( - { - result: { - incremental: [ - { - path: ["hero", "heroFriends", 0], - data: { - homeWorld: "Alderaan", - }, - }, - { - path: ["hero", "heroFriends", 1], - data: { - homeWorld: "Alderaan", - }, - }, - ], - hasNext: false, - }, - }, - true - ); - - await waitFor(() => { - expect(result.current).toStrictEqualTyped({ - data: { - hero: { - heroFriends: [ - { id: "1000", name: "Luke Skywalker", homeWorld: "Alderaan" }, - { id: "1003", name: "Leia Organa", homeWorld: "Alderaan" }, - ], - name: "R2-D2", - }, - }, - dataState: "complete", - networkStatus: NetworkStatus.ready, - error: undefined, - }); - }); - - await expect(refetchPromise!).resolves.toStrictEqualTyped({ - data: { - hero: { - heroFriends: [ - { id: "1000", name: "Luke Skywalker", 
homeWorld: "Alderaan" }, - { id: "1003", name: "Leia Organa", homeWorld: "Alderaan" }, - ], - name: "R2-D2", - }, - }, - }); - - cache.updateQuery({ query }, (data) => ({ - hero: { - ...data.hero, - name: "C3PO", - }, - })); - - await waitFor(() => { - expect(result.current).toStrictEqualTyped({ - data: { - hero: { - heroFriends: [ - { id: "1000", name: "Luke Skywalker", homeWorld: "Alderaan" }, - { id: "1003", name: "Leia Organa", homeWorld: "Alderaan" }, - ], - name: "C3PO", - }, - }, - dataState: "complete", - networkStatus: NetworkStatus.ready, - error: undefined, - }); - }); - - expect(renders.count).toBe(7 + (IS_REACT_19 ? renders.suspenseCount : 0)); - expect(renders.suspenseCount).toBe(2); - expect(renders.frames).toStrictEqualTyped([ - { - data: markAsStreaming({ - hero: { - heroFriends: [ - { id: "1000", name: "Luke Skywalker" }, - { id: "1003", name: "Leia Organa" }, - ], - name: "R2-D2", - }, - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: undefined, - }, - { - data: { - hero: { - heroFriends: [ - { id: "1000", name: "Luke Skywalker", homeWorld: null }, - { id: "1003", name: "Leia Organa", homeWorld: "Alderaan" }, - ], - name: "R2-D2", - }, - }, - dataState: "complete", - networkStatus: NetworkStatus.error, - error: new CombinedGraphQLErrors({ - data: { - hero: { - heroFriends: [ - { id: "1000", name: "Luke Skywalker", homeWorld: null }, - { id: "1003", name: "Leia Organa", homeWorld: "Alderaan" }, - ], - name: "R2-D2", - }, - }, - errors: [ - { - message: - "homeWorld for character with ID 1000 could not be fetched.", - path: ["hero", "heroFriends", 0, "homeWorld"], - }, - ], - }), - }, - { - data: markAsStreaming({ - hero: { - heroFriends: [ - { id: "1000", name: "Luke Skywalker", homeWorld: null }, - { id: "1003", name: "Leia Organa", homeWorld: "Alderaan" }, - ], - name: "R2-D2", - }, - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: undefined, - }, - { - data: { - hero: { - heroFriends: [ - { id: "1000", name: "Luke Skywalker", homeWorld: "Alderaan" }, - { id: "1003", name: "Leia Organa", homeWorld: "Alderaan" }, - ], - name: "R2-D2", - }, - }, - dataState: "complete", - networkStatus: NetworkStatus.ready, - error: undefined, - }, - { - data: { - hero: { - heroFriends: [ - { id: "1000", name: "Luke Skywalker", homeWorld: "Alderaan" }, - { id: "1003", name: "Leia Organa", homeWorld: "Alderaan" }, - ], - name: "C3PO", - }, - }, - dataState: "complete", - networkStatus: NetworkStatus.ready, - error: undefined, - }, - ]); - }); - it("can subscribe to subscriptions and react to cache updates via `subscribeToMore`", async () => { interface SubscriptionData { greetingUpdated: string; diff --git a/src/react/hooks/__tests__/useSuspenseQuery/defer20220824.test.tsx b/src/react/hooks/__tests__/useSuspenseQuery/defer20220824.test.tsx index 08a8824998d..53e520fa515 100644 --- a/src/react/hooks/__tests__/useSuspenseQuery/defer20220824.test.tsx +++ b/src/react/hooks/__tests__/useSuspenseQuery/defer20220824.test.tsx @@ -5,10 +5,14 @@ import { useTrackRenders, } from "@testing-library/react-render-stream"; import React, { Suspense } from "react"; +import { ErrorBoundary } from "react-error-boundary"; +import { delay, of, throwError } from "rxjs"; -import type { OperationVariables } from "@apollo/client"; +import type { ErrorLike, OperationVariables } from "@apollo/client"; import { ApolloClient, + ApolloLink, + CombinedGraphQLErrors, gql, InMemoryCache, NetworkStatus, @@ -18,17 +22,27 @@ import { ApolloProvider, 
useSuspenseQuery } from "@apollo/client/react"; import { markAsStreaming, mockDefer20220824, + spyOnConsole, + wait, } from "@apollo/client/testing/internal"; +import { offsetLimitPagination } from "@apollo/client/utilities"; +import { invariant } from "@apollo/client/utilities/invariant"; const IS_REACT_19 = React.version.startsWith("19"); -async function renderSuspenseHook( - renderHook: () => useSuspenseQuery.Result, - options: Pick +async function renderSuspenseHook< + TData, + TVariables extends OperationVariables, + Props = never, +>( + renderHook: ( + props: Props extends never ? undefined : Props + ) => useSuspenseQuery.Result, + options: Pick & { initialProps?: Props } ) { - function UseSuspenseQuery() { + function UseSuspenseQuery({ props }: { props: Props | undefined }) { useTrackRenders({ name: "useSuspenseQuery" }); - renderStream.replaceSnapshot(renderHook()); + replaceSnapshot(renderHook(props as any)); return null; } @@ -39,24 +53,45 @@ async function renderSuspenseHook( return null; } - function App() { + function ErrorFallback() { + useTrackRenders({ name: "ErrorFallback" }); + + return null; + } + + function App({ props }: { props: Props | undefined }) { return ( }> - + replaceSnapshot({ error })} + > + + ); } - const { render, takeRender, ...renderStream } = - createRenderStream>(); + const { render, takeRender, replaceSnapshot, getCurrentRender } = + createRenderStream< + useSuspenseQuery.Result | { error: ErrorLike } + >(); + + const utils = await render(, options); + + function rerender(props: Props) { + return utils.rerender(); + } + + function getCurrentSnapshot() { + const { snapshot } = getCurrentRender(); - const utils = await render(, options); + invariant("data" in snapshot, "Snapshot is not a hook snapshot"); - function rerender() { - return utils.rerender(); + return snapshot; } - return { takeRender, rerender }; + return { getCurrentSnapshot, takeRender, rerender }; } test("suspends deferred queries until initial chunk loads then streams in data as it loads", async () => { @@ -150,3 +185,2348 @@ test("suspends deferred queries until initial chunk loads then streams in data a await expect(takeRender).not.toRerender(); }); + +test.each([ + "cache-first", + "network-only", + "no-cache", + "cache-and-network", +])( + 'suspends deferred queries until initial chunk loads then streams in data as it loads when using a "%s" fetch policy', + async (fetchPolicy) => { + const query = gql` + query { + greeting { + message + ... 
on Greeting @defer { + recipient { + name + } + } + } + } + `; + + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDefer20220824(); + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: httpLink, + incrementalHandler: new Defer20220824Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + () => useSuspenseQuery(query, { fetchPolicy }), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + enqueueInitialChunk({ + data: { + greeting: { message: "Hello world", __typename: "Greeting" }, + }, + hasNext: true, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + greeting: { message: "Hello world", __typename: "Greeting" }, + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + enqueueSubsequentChunk({ + incremental: [ + { + data: { + recipient: { name: "Alice", __typename: "Person" }, + __typename: "Greeting", + }, + path: ["greeting"], + }, + ], + hasNext: false, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Alice" }, + }, + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + await expect(takeRender).not.toRerender(); + } +); + +test('does not suspend deferred queries with data in the cache and using a "cache-first" fetch policy', async () => { + const query = gql` + query { + greeting { + message + ... on Greeting @defer { + recipient { + name + } + } + } + } + `; + + const cache = new InMemoryCache(); + + cache.writeQuery({ + query, + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Alice" }, + }, + }, + }); + + const client = new ApolloClient({ + cache, + link: ApolloLink.empty(), + incrementalHandler: new Defer20220824Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + () => useSuspenseQuery(query, { fetchPolicy: "cache-first" }), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + greeting: { + message: "Hello world", + __typename: "Greeting", + recipient: { __typename: "Person", name: "Alice" }, + }, + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + + await expect(takeRender).not.toRerender(); +}); + +test('does not suspend deferred queries with partial data in the cache and using a "cache-first" fetch policy with `returnPartialData`', async () => { + const query = gql` + query { + greeting { + message + ... on Greeting @defer { + recipient { + name + } + } + } + } + `; + + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDefer20220824(); + const cache = new InMemoryCache(); + + // We are intentionally writing partial data to the cache. 
Supress console + // warnings to avoid unnecessary noise in the test. + { + using _consoleSpy = spyOnConsole("error"); + cache.writeQuery({ + query, + data: { + greeting: { + __typename: "Greeting", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + }); + } + + const client = new ApolloClient({ + cache, + link: httpLink, + incrementalHandler: new Defer20220824Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + () => + useSuspenseQuery(query, { + fetchPolicy: "cache-first", + returnPartialData: true, + }), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + greeting: { + __typename: "Greeting", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + dataState: "partial", + networkStatus: NetworkStatus.loading, + error: undefined, + }); + } + + enqueueInitialChunk({ + data: { greeting: { message: "Hello world", __typename: "Greeting" } }, + hasNext: true, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + enqueueSubsequentChunk({ + incremental: [ + { + data: { + __typename: "Greeting", + recipient: { name: "Alice", __typename: "Person" }, + }, + path: ["greeting"], + }, + ], + hasNext: false, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Alice" }, + }, + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + await expect(takeRender).not.toRerender(); +}); + +test('does not suspend deferred queries with data in the cache and using a "cache-and-network" fetch policy', async () => { + const query = gql` + query { + greeting { + message + ... 
on Greeting @defer { + recipient { + name + } + } + } + } + `; + + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDefer20220824(); + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: httpLink, + incrementalHandler: new Defer20220824Handler(), + }); + + client.writeQuery({ + query, + data: { + greeting: { + __typename: "Greeting", + message: "Hello cached", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + () => useSuspenseQuery(query, { fetchPolicy: "cache-and-network" }), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + greeting: { + message: "Hello cached", + __typename: "Greeting", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + dataState: "complete", + networkStatus: NetworkStatus.loading, + error: undefined, + }); + } + + enqueueInitialChunk({ + data: { greeting: { __typename: "Greeting", message: "Hello world" } }, + hasNext: true, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + enqueueSubsequentChunk({ + incremental: [ + { + data: { + recipient: { name: "Alice", __typename: "Person" }, + __typename: "Greeting", + }, + path: ["greeting"], + }, + ], + hasNext: false, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Alice" }, + }, + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + await expect(takeRender).not.toRerender(); +}); + +test("suspends deferred queries with lists and properly patches results", async () => { + const query = gql` + query { + greetings { + message + ... 
on Greeting @defer { + recipient { + name + } + } + } + } + `; + + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDefer20220824(); + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: httpLink, + incrementalHandler: new Defer20220824Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + () => useSuspenseQuery(query), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + enqueueInitialChunk({ + data: { + greetings: [ + { __typename: "Greeting", message: "Hello world" }, + { __typename: "Greeting", message: "Hello again" }, + ], + }, + hasNext: true, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + greetings: [ + { __typename: "Greeting", message: "Hello world" }, + { __typename: "Greeting", message: "Hello again" }, + ], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + enqueueSubsequentChunk({ + incremental: [ + { + data: { + __typename: "Greeting", + recipient: { __typename: "Person", name: "Alice" }, + }, + path: ["greetings", 0], + }, + ], + hasNext: true, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + greetings: [ + { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Alice" }, + }, + { + __typename: "Greeting", + message: "Hello again", + }, + ], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + enqueueSubsequentChunk({ + incremental: [ + { + data: { + __typename: "Greeting", + recipient: { __typename: "Person", name: "Bob" }, + }, + path: ["greetings", 1], + }, + ], + hasNext: false, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + greetings: [ + { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Alice" }, + }, + { + __typename: "Greeting", + message: "Hello again", + recipient: { __typename: "Person", name: "Bob" }, + }, + ], + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + await expect(takeRender).not.toRerender(); +}); + +test("suspends queries with deferred fragments in lists and properly merges arrays", async () => { + const query = gql` + query DeferVariation { + allProducts { + delivery { + ...MyFragment @defer + } + sku + id + } + } + + fragment MyFragment on DeliveryEstimates { + estimatedDelivery + fastestDelivery + } + `; + + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDefer20220824(); + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: httpLink, + incrementalHandler: new Defer20220824Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + () => useSuspenseQuery(query), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + { + const { renderedComponents } = await takeRender(); 
+ + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + enqueueInitialChunk({ + data: { + allProducts: [ + { + __typename: "Product", + delivery: { + __typename: "DeliveryEstimates", + }, + id: "apollo-federation", + sku: "federation", + }, + { + __typename: "Product", + delivery: { + __typename: "DeliveryEstimates", + }, + id: "apollo-studio", + sku: "studio", + }, + ], + }, + hasNext: true, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + allProducts: [ + { + __typename: "Product", + delivery: { + __typename: "DeliveryEstimates", + }, + id: "apollo-federation", + sku: "federation", + }, + { + __typename: "Product", + delivery: { + __typename: "DeliveryEstimates", + }, + id: "apollo-studio", + sku: "studio", + }, + ], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + enqueueSubsequentChunk({ + hasNext: false, + incremental: [ + { + data: { + __typename: "DeliveryEstimates", + estimatedDelivery: "6/25/2021", + fastestDelivery: "6/24/2021", + }, + path: ["allProducts", 0, "delivery"], + }, + { + data: { + __typename: "DeliveryEstimates", + estimatedDelivery: "6/25/2021", + fastestDelivery: "6/24/2021", + }, + path: ["allProducts", 1, "delivery"], + }, + ], + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + allProducts: [ + { + __typename: "Product", + delivery: { + __typename: "DeliveryEstimates", + estimatedDelivery: "6/25/2021", + fastestDelivery: "6/24/2021", + }, + id: "apollo-federation", + sku: "federation", + }, + { + __typename: "Product", + delivery: { + __typename: "DeliveryEstimates", + estimatedDelivery: "6/25/2021", + fastestDelivery: "6/24/2021", + }, + id: "apollo-studio", + sku: "studio", + }, + ], + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + await expect(takeRender).not.toRerender(); +}); + +test("incrementally rerenders data returned by a `refetch` for a deferred query", async () => { + const query = gql` + query { + greeting { + message + ... 
@defer { + recipient { + name + } + } + } + } + `; + + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDefer20220824(); + + const client = new ApolloClient({ + link: httpLink, + cache: new InMemoryCache(), + incrementalHandler: new Defer20220824Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender, getCurrentSnapshot } = await renderSuspenseHook( + () => useSuspenseQuery(query), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + enqueueInitialChunk({ + data: { greeting: { __typename: "Greeting", message: "Hello world" } }, + hasNext: true, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + greeting: { + __typename: "Greeting", + message: "Hello world", + }, + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + enqueueSubsequentChunk({ + incremental: [ + { + data: { + recipient: { name: "Alice", __typename: "Person" }, + }, + path: ["greeting"], + }, + ], + hasNext: false, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { + __typename: "Person", + name: "Alice", + }, + }, + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + const refetchPromise = getCurrentSnapshot().refetch(); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + enqueueInitialChunk({ + data: { + greeting: { + __typename: "Greeting", + message: "Goodbye", + }, + }, + hasNext: true, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + greeting: { + __typename: "Greeting", + message: "Goodbye", + recipient: { + __typename: "Person", + name: "Alice", + }, + }, + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + enqueueSubsequentChunk({ + incremental: [ + { + data: { + recipient: { name: "Bob", __typename: "Person" }, + }, + path: ["greeting"], + }, + ], + hasNext: false, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + greeting: { + __typename: "Greeting", + message: "Goodbye", + recipient: { + __typename: "Person", + name: "Bob", + }, + }, + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + await expect(refetchPromise).resolves.toStrictEqualTyped({ + data: { + greeting: { + __typename: "Greeting", + message: "Goodbye", + recipient: { + __typename: "Person", + name: "Bob", + }, + }, + }, + }); +}); + +test("incrementally renders data returned after skipping a deferred query", async () => { + const query = gql` + query { + greeting { + message + ... 
@defer { + recipient { + name + } + } + } + } + `; + + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDefer20220824(); + + const client = new ApolloClient({ + link: httpLink, + cache: new InMemoryCache(), + incrementalHandler: new Defer20220824Handler(), + }); + + using __disabledAct = disableActEnvironment(); + const { takeRender, rerender } = await renderSuspenseHook( + ({ skip }) => useSuspenseQuery(query, { skip }), + { + initialProps: { skip: true }, + wrapper: ({ children }) => ( + {children} + ), + } + ); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: undefined, + dataState: "empty", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + await rerender({ skip: false }); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + enqueueInitialChunk({ + data: { greeting: { __typename: "Greeting", message: "Hello world" } }, + hasNext: true, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + greeting: { + __typename: "Greeting", + message: "Hello world", + }, + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + enqueueSubsequentChunk({ + incremental: [ + { + data: { + recipient: { name: "Alice", __typename: "Person" }, + }, + path: ["greeting"], + }, + ], + hasNext: false, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { + __typename: "Person", + name: "Alice", + }, + }, + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + await expect(takeRender).not.toRerender(); +}); + +// TODO: This test is a bit of a lie. `fetchMore` should incrementally +// rerender when using `@defer` but there is currently a bug in the core +// implementation that prevents updates until the final result is returned. +// This test reflects the behavior as it exists today, but will need +// to be updated once the core bug is fixed. +// +// NOTE: A duplicate it.failng test has been added right below this one with +// the expected behavior added in (i.e. the commented code in this test). Once +// the core bug is fixed, this test can be removed in favor of the other test. +// +// https://github.com/apollographql/apollo-client/issues/11034 +test("rerenders data returned by `fetchMore` for a deferred query", async () => { + const query = gql` + query ($offset: Int) { + greetings(offset: $offset) { + message + ... 
@defer { + recipient { + name + } + } + } + } + `; + + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDefer20220824(); + + const cache = new InMemoryCache({ + typePolicies: { + Query: { + fields: { + greetings: offsetLimitPagination(), + }, + }, + }, + }); + + const client = new ApolloClient({ + link: httpLink, + cache, + incrementalHandler: new Defer20220824Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender, getCurrentSnapshot } = await renderSuspenseHook( + () => useSuspenseQuery(query, { variables: { offset: 0 } }), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + enqueueInitialChunk({ + data: { + greetings: [{ __typename: "Greeting", message: "Hello world" }], + }, + hasNext: true, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + greetings: [{ __typename: "Greeting", message: "Hello world" }], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + enqueueSubsequentChunk({ + incremental: [ + { + data: { + recipient: { name: "Alice", __typename: "Person" }, + }, + path: ["greetings", 0], + }, + ], + hasNext: false, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + greetings: [ + { + __typename: "Greeting", + message: "Hello world", + recipient: { + __typename: "Person", + name: "Alice", + }, + }, + ], + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + const fetchMorePromise = getCurrentSnapshot().fetchMore({ + variables: { offset: 1 }, + }); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + enqueueInitialChunk({ + data: { + greetings: [ + { + __typename: "Greeting", + message: "Goodbye", + }, + ], + }, + hasNext: true, + }); + + // TODO: Re-enable once the core bug is fixed + // { + // const { snapshot, renderedComponents } = await takeRender(); + // + // expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + // expect(snapshot).toStrictEqualTyped({ + // data: markAsStreaming({ + // greetings: [ + // { + // __typename: "Greeting", + // message: "Hello world", + // recipient: { + // __typename: "Person", + // name: "Alice", + // }, + // }, + // { + // __typename: "Greeting", + // message: "Goodbye", + // }, + // ], + // }), + // dataState: "streaming", + // networkStatus: NetworkStatus.streaming, + // error: undefined, + // }); + // } + + await wait(0); + enqueueSubsequentChunk({ + incremental: [ + { + data: { + recipient: { name: "Bob", __typename: "Person" }, + }, + path: ["greetings", 0], + }, + ], + hasNext: false, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + greetings: [ + { + __typename: "Greeting", + message: "Hello world", + recipient: { + __typename: "Person", + name: "Alice", + }, + }, + { + __typename: "Greeting", + message: "Goodbye", + recipient: { + __typename: "Person", + name: "Bob", + }, + }, + ], + }, + dataState: 
"complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + await expect(fetchMorePromise!).resolves.toStrictEqualTyped({ + data: { + greetings: [ + { + __typename: "Greeting", + message: "Goodbye", + recipient: { + __typename: "Person", + name: "Bob", + }, + }, + ], + }, + }); + + await expect(takeRender).not.toRerender(); +}); + +// TODO: This is a duplicate of the test above, but with the expected behavior +// added (hence the `it.failing`). Remove the previous test once issue #11034 +// is fixed. +// +// https://github.com/apollographql/apollo-client/issues/11034 +it.failing( + "incrementally rerenders data returned by a `fetchMore` for a deferred query", + async () => { + const query = gql` + query ($offset: Int) { + greetings(offset: $offset) { + message + ... @defer { + recipient { + name + } + } + } + } + `; + + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDefer20220824(); + + const cache = new InMemoryCache({ + typePolicies: { + Query: { + fields: { + greetings: offsetLimitPagination(), + }, + }, + }, + }); + + const client = new ApolloClient({ + link: httpLink, + cache, + incrementalHandler: new Defer20220824Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender, getCurrentSnapshot } = await renderSuspenseHook( + () => useSuspenseQuery(query, { variables: { offset: 0 } }), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + enqueueInitialChunk({ + data: { + greetings: [{ __typename: "Greeting", message: "Hello world" }], + }, + hasNext: true, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + greetings: [{ __typename: "Greeting", message: "Hello world" }], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + enqueueSubsequentChunk({ + incremental: [ + { + data: { + recipient: { name: "Alice", __typename: "Person" }, + }, + path: ["greetings", 0], + }, + ], + hasNext: false, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + greetings: [ + { + __typename: "Greeting", + message: "Hello world", + recipient: { + __typename: "Person", + name: "Alice", + }, + }, + ], + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + const fetchMorePromise = getCurrentSnapshot().fetchMore({ + variables: { offset: 1 }, + }); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + enqueueInitialChunk({ + data: { + greetings: [ + { + __typename: "Greeting", + message: "Goodbye", + }, + ], + }, + hasNext: true, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + greetings: [ + { + __typename: "Greeting", + message: "Hello world", + recipient: { + __typename: "Person", + name: "Alice", + }, + }, + { + __typename: "Greeting", + message: "Goodbye", + }, + ], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: 
undefined, + }); + } + + enqueueSubsequentChunk({ + incremental: [ + { + data: { + recipient: { name: "Bob", __typename: "Person" }, + }, + path: ["greetings", 0], + }, + ], + hasNext: false, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + greetings: [ + { + __typename: "Greeting", + message: "Hello world", + recipient: { + __typename: "Person", + name: "Alice", + }, + }, + { + __typename: "Greeting", + message: "Goodbye", + recipient: { + __typename: "Person", + name: "Bob", + }, + }, + ], + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + await expect(fetchMorePromise!).resolves.toStrictEqualTyped({ + data: { + greetings: [ + { + __typename: "Greeting", + message: "Goodbye", + recipient: { + __typename: "Person", + name: "Bob", + }, + }, + ], + }, + }); + + await expect(takeRender).not.toRerender(); + } +); + +test("throws network errors returned by deferred queries", async () => { + using _consoleSpy = spyOnConsole("error"); + + const query = gql` + query { + greeting { + message + ... on Greeting @defer { + recipient { + name + } + } + } + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: new ApolloLink(() => { + return throwError(() => new Error("Could not fetch")).pipe(delay(20)); + }), + incrementalHandler: new Defer20220824Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + () => useSuspenseQuery(query), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["ErrorFallback"]); + expect(snapshot).toStrictEqualTyped({ + error: new Error("Could not fetch"), + }); + } + + await expect(takeRender).not.toRerender(); +}); + +test("throws graphql errors returned by deferred queries", async () => { + using _consoleSpy = spyOnConsole("error"); + + const query = gql` + query { + greeting { + message + ... on Greeting @defer { + recipient { + name + } + } + } + } + `; + + const { httpLink, enqueueInitialChunk } = mockDefer20220824(); + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: httpLink, + incrementalHandler: new Defer20220824Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + () => useSuspenseQuery(query), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + enqueueInitialChunk({ + errors: [{ message: "Could not fetch greeting" }], + hasNext: false, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["ErrorFallback"]); + expect(snapshot).toStrictEqualTyped({ + error: new CombinedGraphQLErrors({ + data: null, + errors: [{ message: "Could not fetch greeting" }], + }), + }); + } + + await expect(takeRender).not.toRerender(); +}); + +test("throws errors returned by deferred queries that include partial data", async () => { + using _consoleSpy = spyOnConsole("error"); + + const query = gql` + query { + greeting { + message + ... 
on Greeting @defer { + recipient { + name + } + } + } + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: new ApolloLink(() => { + return of({ + data: { greeting: null }, + errors: [{ message: "Could not fetch greeting" }], + }).pipe(delay(20)); + }), + incrementalHandler: new Defer20220824Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + () => useSuspenseQuery(query), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["ErrorFallback"]); + expect(snapshot).toStrictEqualTyped({ + error: new CombinedGraphQLErrors({ + data: { greeting: null }, + errors: [{ message: "Could not fetch greeting" }], + }), + }); + } + + await expect(takeRender).not.toRerender(); +}); + +test("discards partial data and throws errors returned in incremental chunks", async () => { + using _consoleSpy = spyOnConsole("error"); + + const query = gql` + query { + hero { + name + heroFriends { + id + name + ... @defer { + homeWorld + } + } + } + } + `; + + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDefer20220824(); + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: httpLink, + incrementalHandler: new Defer20220824Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + () => useSuspenseQuery(query), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + enqueueInitialChunk({ + data: { + hero: { + name: "R2-D2", + heroFriends: [ + { + id: "1000", + name: "Luke Skywalker", + }, + { + id: "1003", + name: "Leia Organa", + }, + ], + }, + }, + hasNext: true, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + hero: { + heroFriends: [ + { + id: "1000", + name: "Luke Skywalker", + }, + { + id: "1003", + name: "Leia Organa", + }, + ], + name: "R2-D2", + }, + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + enqueueSubsequentChunk({ + incremental: [ + { + path: ["hero", "heroFriends", 0], + errors: [ + { + message: + "homeWorld for character with ID 1000 could not be fetched.", + path: ["hero", "heroFriends", 0, "homeWorld"], + }, + ], + data: { + homeWorld: null, + }, + }, + // This chunk is ignored since errorPolicy `none` throws away partial + // data + { + path: ["hero", "heroFriends", 1], + data: { + homeWorld: "Alderaan", + }, + }, + ], + hasNext: false, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["ErrorFallback"]); + expect(snapshot).toStrictEqualTyped({ + error: new CombinedGraphQLErrors({ + data: { + hero: { + heroFriends: [ + { + id: "1000", + name: "Luke Skywalker", + homeWorld: null, + }, + { + id: "1003", + name: "Leia Organa", + homeWorld: "Alderaan", + }, + ], + name: "R2-D2", + }, + }, + errors: [ + { + message: + "homeWorld for character with ID 1000 could not be fetched.", + path: ["hero", "heroFriends", 0, "homeWorld"], 
+ }, + ], + }), + }); + } + + await expect(takeRender).not.toRerender(); +}); + +test("adds partial data and does not throw errors returned in incremental chunks but returns them in `error` property with errorPolicy set to `all`", async () => { + const query = gql` + query { + hero { + name + heroFriends { + id + name + ... @defer { + homeWorld + } + } + } + } + `; + + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDefer20220824(); + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: httpLink, + incrementalHandler: new Defer20220824Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + () => useSuspenseQuery(query, { errorPolicy: "all" }), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + enqueueInitialChunk({ + data: { + hero: { + name: "R2-D2", + heroFriends: [ + { + id: "1000", + name: "Luke Skywalker", + }, + { + id: "1003", + name: "Leia Organa", + }, + ], + }, + }, + hasNext: true, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + hero: { + heroFriends: [ + { + id: "1000", + name: "Luke Skywalker", + }, + { + id: "1003", + name: "Leia Organa", + }, + ], + name: "R2-D2", + }, + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + enqueueSubsequentChunk({ + incremental: [ + { + path: ["hero", "heroFriends", 0], + errors: [ + { + message: + "homeWorld for character with ID 1000 could not be fetched.", + path: ["hero", "heroFriends", 0, "homeWorld"], + }, + ], + data: { + homeWorld: null, + }, + }, + // Unlike the default (errorPolicy = `none`), this data will be + // added to the final result + { + path: ["hero", "heroFriends", 1], + data: { + homeWorld: "Alderaan", + }, + }, + ], + hasNext: false, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + hero: { + heroFriends: [ + { + id: "1000", + name: "Luke Skywalker", + homeWorld: null, + }, + { + id: "1003", + name: "Leia Organa", + homeWorld: "Alderaan", + }, + ], + name: "R2-D2", + }, + }, + dataState: "complete", + networkStatus: NetworkStatus.error, + error: new CombinedGraphQLErrors({ + data: { + hero: { + heroFriends: [ + { + id: "1000", + name: "Luke Skywalker", + homeWorld: null, + }, + { + id: "1003", + name: "Leia Organa", + homeWorld: "Alderaan", + }, + ], + name: "R2-D2", + }, + }, + errors: [ + { + message: + "homeWorld for character with ID 1000 could not be fetched.", + path: ["hero", "heroFriends", 0, "homeWorld"], + }, + ], + }), + }); + } + + await expect(takeRender).not.toRerender(); +}); + +test("adds partial data and discards errors returned in incremental chunks with errorPolicy set to `ignore`", async () => { + const query = gql` + query { + hero { + name + heroFriends { + id + name + ... 
@defer { + homeWorld + } + } + } + } + `; + + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDefer20220824(); + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: httpLink, + incrementalHandler: new Defer20220824Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + () => useSuspenseQuery(query, { errorPolicy: "ignore" }), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + enqueueInitialChunk({ + data: { + hero: { + name: "R2-D2", + heroFriends: [ + { + id: "1000", + name: "Luke Skywalker", + }, + { + id: "1003", + name: "Leia Organa", + }, + ], + }, + }, + hasNext: true, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + hero: { + heroFriends: [ + { + id: "1000", + name: "Luke Skywalker", + }, + { + id: "1003", + name: "Leia Organa", + }, + ], + name: "R2-D2", + }, + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + enqueueSubsequentChunk({ + incremental: [ + { + path: ["hero", "heroFriends", 0], + errors: [ + { + message: + "homeWorld for character with ID 1000 could not be fetched.", + path: ["hero", "heroFriends", 0, "homeWorld"], + }, + ], + data: { + homeWorld: null, + }, + }, + { + path: ["hero", "heroFriends", 1], + data: { + homeWorld: "Alderaan", + }, + }, + ], + hasNext: false, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + hero: { + heroFriends: [ + { + id: "1000", + name: "Luke Skywalker", + homeWorld: null, + }, + { + id: "1003", + name: "Leia Organa", + homeWorld: "Alderaan", + }, + ], + name: "R2-D2", + }, + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + await expect(takeRender).not.toRerender(); +}); + +test("can refetch and respond to cache updates after encountering an error in an incremental chunk for a deferred query when `errorPolicy` is `all`", async () => { + const query = gql` + query { + hero { + name + heroFriends { + id + name + ... 
@defer { + homeWorld + } + } + } + } + `; + + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDefer20220824(); + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: httpLink, + incrementalHandler: new Defer20220824Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender, getCurrentSnapshot } = await renderSuspenseHook( + () => useSuspenseQuery(query, { errorPolicy: "all" }), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + enqueueInitialChunk({ + data: { + hero: { + name: "R2-D2", + heroFriends: [ + { id: "1000", name: "Luke Skywalker" }, + { id: "1003", name: "Leia Organa" }, + ], + }, + }, + hasNext: true, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + hero: { + heroFriends: [ + { id: "1000", name: "Luke Skywalker" }, + { id: "1003", name: "Leia Organa" }, + ], + name: "R2-D2", + }, + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + enqueueSubsequentChunk({ + incremental: [ + { + path: ["hero", "heroFriends", 0], + errors: [ + { + message: + "homeWorld for character with ID 1000 could not be fetched.", + path: ["hero", "heroFriends", 0, "homeWorld"], + }, + ], + data: { + homeWorld: null, + }, + }, + { + path: ["hero", "heroFriends", 1], + data: { + homeWorld: "Alderaan", + }, + }, + ], + hasNext: false, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + hero: { + heroFriends: [ + { id: "1000", name: "Luke Skywalker", homeWorld: null }, + { id: "1003", name: "Leia Organa", homeWorld: "Alderaan" }, + ], + name: "R2-D2", + }, + }, + dataState: "complete", + networkStatus: NetworkStatus.error, + error: new CombinedGraphQLErrors({ + data: { + hero: { + heroFriends: [ + { id: "1000", name: "Luke Skywalker", homeWorld: null }, + { id: "1003", name: "Leia Organa", homeWorld: "Alderaan" }, + ], + name: "R2-D2", + }, + }, + errors: [ + { + message: + "homeWorld for character with ID 1000 could not be fetched.", + path: ["hero", "heroFriends", 0, "homeWorld"], + }, + ], + }), + }); + } + + const refetchPromise = getCurrentSnapshot().refetch(); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + enqueueInitialChunk({ + data: { + hero: { + name: "R2-D2", + heroFriends: [ + { id: "1000", name: "Luke Skywalker" }, + { id: "1003", name: "Leia Organa" }, + ], + }, + }, + hasNext: true, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + hero: { + heroFriends: [ + { id: "1000", name: "Luke Skywalker", homeWorld: null }, + { id: "1003", name: "Leia Organa", homeWorld: "Alderaan" }, + ], + name: "R2-D2", + }, + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + enqueueSubsequentChunk({ + incremental: [ + { + path: ["hero", "heroFriends", 0], + data: { + homeWorld: "Alderaan", + }, + }, + { + path: ["hero", "heroFriends", 1], + data: { + 
homeWorld: "Alderaan", + }, + }, + ], + hasNext: false, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + hero: { + heroFriends: [ + { id: "1000", name: "Luke Skywalker", homeWorld: "Alderaan" }, + { id: "1003", name: "Leia Organa", homeWorld: "Alderaan" }, + ], + name: "R2-D2", + }, + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + await expect(refetchPromise!).resolves.toStrictEqualTyped({ + data: { + hero: { + heroFriends: [ + { id: "1000", name: "Luke Skywalker", homeWorld: "Alderaan" }, + { id: "1003", name: "Leia Organa", homeWorld: "Alderaan" }, + ], + name: "R2-D2", + }, + }, + }); + + client.cache.updateQuery({ query }, (data) => ({ + hero: { + ...data.hero, + name: "C3PO", + }, + })); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + hero: { + heroFriends: [ + { id: "1000", name: "Luke Skywalker", homeWorld: "Alderaan" }, + { id: "1003", name: "Leia Organa", homeWorld: "Alderaan" }, + ], + name: "C3PO", + }, + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + await expect(takeRender).not.toRerender(); +}); From fec5d76bc64d6522ce98dbb8e3767e4f842b0599 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 21:51:14 -0600 Subject: [PATCH 053/254] Ignore useSuspenseQuery subfile tests --- config/jest.config.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/config/jest.config.ts b/config/jest.config.ts index 9e8be6190dc..bf0997ccec5 100644 --- a/config/jest.config.ts +++ b/config/jest.config.ts @@ -49,6 +49,7 @@ const react17TestFileIgnoreList = [ "src/testing/experimental/__tests__/createTestSchema.test.tsx", "src/react/hooks/__tests__/useSuspenseFragment.test.tsx", "src/react/hooks/__tests__/useSuspenseQuery.test.tsx", + "src/react/hooks/__tests__/useSuspenseQuery/*", "src/react/hooks/__tests__/useBackgroundQuery.test.tsx", "src/react/hooks/__tests__/useLoadableQuery.test.tsx", "src/react/hooks/__tests__/useQueryRefHandlers.test.tsx", From 0064d8f4ac95d2fa8b1709cbca1075db79317fe5 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 21:55:10 -0600 Subject: [PATCH 054/254] Remove unneeded heck for React 19 in test --- .../hooks/__tests__/useSuspenseQuery/defer20220824.test.tsx | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/react/hooks/__tests__/useSuspenseQuery/defer20220824.test.tsx b/src/react/hooks/__tests__/useSuspenseQuery/defer20220824.test.tsx index 53e520fa515..796380b6724 100644 --- a/src/react/hooks/__tests__/useSuspenseQuery/defer20220824.test.tsx +++ b/src/react/hooks/__tests__/useSuspenseQuery/defer20220824.test.tsx @@ -28,8 +28,6 @@ import { import { offsetLimitPagination } from "@apollo/client/utilities"; import { invariant } from "@apollo/client/utilities/invariant"; -const IS_REACT_19 = React.version.startsWith("19"); - async function renderSuspenseHook< TData, TVariables extends OperationVariables, From 1d61c488a7fc6245c707a6d56cabf9774a3355b0 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 22:18:34 -0600 Subject: [PATCH 055/254] Remove unneeded non-null assertion --- .../hooks/__tests__/useSuspenseQuery/defer20220824.test.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/src/react/hooks/__tests__/useSuspenseQuery/defer20220824.test.tsx b/src/react/hooks/__tests__/useSuspenseQuery/defer20220824.test.tsx index 796380b6724..a5710c6025a 100644 --- a/src/react/hooks/__tests__/useSuspenseQuery/defer20220824.test.tsx +++ b/src/react/hooks/__tests__/useSuspenseQuery/defer20220824.test.tsx @@ -1398,7 +1398,7 @@ test("rerenders data returned by `fetchMore` for a deferred query", async () => }); } - await expect(fetchMorePromise!).resolves.toStrictEqualTyped({ + await expect(fetchMorePromise).resolves.toStrictEqualTyped({ data: { greetings: [ { From 74ab3ca44010fe592368a4f43b7a45fb882033ff Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 22:21:14 -0600 Subject: [PATCH 056/254] Rename ErrorFallback to ErrorBoundary --- .../__tests__/useSuspenseQuery/defer20220824.test.tsx | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/react/hooks/__tests__/useSuspenseQuery/defer20220824.test.tsx b/src/react/hooks/__tests__/useSuspenseQuery/defer20220824.test.tsx index a5710c6025a..459ddbf4f1c 100644 --- a/src/react/hooks/__tests__/useSuspenseQuery/defer20220824.test.tsx +++ b/src/react/hooks/__tests__/useSuspenseQuery/defer20220824.test.tsx @@ -52,7 +52,7 @@ async function renderSuspenseHook< } function ErrorFallback() { - useTrackRenders({ name: "ErrorFallback" }); + useTrackRenders({ name: "ErrorBoundary" }); return null; } @@ -1682,7 +1682,7 @@ test("throws network errors returned by deferred queries", async () => { { const { snapshot, renderedComponents } = await takeRender(); - expect(renderedComponents).toStrictEqual(["ErrorFallback"]); + expect(renderedComponents).toStrictEqual(["ErrorBoundary"]); expect(snapshot).toStrictEqualTyped({ error: new Error("Could not fetch"), }); @@ -1739,7 +1739,7 @@ test("throws graphql errors returned by deferred queries", async () => { { const { snapshot, renderedComponents } = await takeRender(); - expect(renderedComponents).toStrictEqual(["ErrorFallback"]); + expect(renderedComponents).toStrictEqual(["ErrorBoundary"]); expect(snapshot).toStrictEqualTyped({ error: new CombinedGraphQLErrors({ data: null, @@ -1797,7 +1797,7 @@ test("throws errors returned by deferred queries that include partial data", asy { const { snapshot, renderedComponents } = await takeRender(); - expect(renderedComponents).toStrictEqual(["ErrorFallback"]); + expect(renderedComponents).toStrictEqual(["ErrorBoundary"]); expect(snapshot).toStrictEqualTyped({ error: new CombinedGraphQLErrors({ data: { greeting: null }, @@ -1927,7 +1927,7 @@ test("discards partial data and throws errors returned in incremental chunks", a { const { snapshot, renderedComponents } = await takeRender(); - expect(renderedComponents).toStrictEqual(["ErrorFallback"]); + expect(renderedComponents).toStrictEqual(["ErrorBoundary"]); expect(snapshot).toStrictEqualTyped({ error: new CombinedGraphQLErrors({ data: { From 78266894239aaf8ce97f658256bc18f0580f2835 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 22:27:34 -0600 Subject: [PATCH 057/254] Remove unneeded non-null assertion --- .../hooks/__tests__/useSuspenseQuery/defer20220824.test.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/react/hooks/__tests__/useSuspenseQuery/defer20220824.test.tsx b/src/react/hooks/__tests__/useSuspenseQuery/defer20220824.test.tsx index 459ddbf4f1c..59e373142ec 100644 --- a/src/react/hooks/__tests__/useSuspenseQuery/defer20220824.test.tsx +++ b/src/react/hooks/__tests__/useSuspenseQuery/defer20220824.test.tsx @@ -2487,7 +2487,7 
@@ test("can refetch and respond to cache updates after encountering an error in an }); } - await expect(refetchPromise!).resolves.toStrictEqualTyped({ + await expect(refetchPromise).resolves.toStrictEqualTyped({ data: { hero: { heroFriends: [ From 51e9ff3ae580bac2f520f5a79f8fb49b0e29c9d3 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 22:28:10 -0600 Subject: [PATCH 058/254] Add useSuspenseQuery tets for GraphQL17Alpha9Handler --- .../deferGraphQL17Alpha9.test.tsx | 2588 +++++++++++++++++ 1 file changed, 2588 insertions(+) create mode 100644 src/react/hooks/__tests__/useSuspenseQuery/deferGraphQL17Alpha9.test.tsx diff --git a/src/react/hooks/__tests__/useSuspenseQuery/deferGraphQL17Alpha9.test.tsx b/src/react/hooks/__tests__/useSuspenseQuery/deferGraphQL17Alpha9.test.tsx new file mode 100644 index 00000000000..4ad4e3ceb3f --- /dev/null +++ b/src/react/hooks/__tests__/useSuspenseQuery/deferGraphQL17Alpha9.test.tsx @@ -0,0 +1,2588 @@ +import type { RenderOptions } from "@testing-library/react"; +import { + createRenderStream, + disableActEnvironment, + useTrackRenders, +} from "@testing-library/react-render-stream"; +import React, { Suspense } from "react"; +import { ErrorBoundary } from "react-error-boundary"; +import { delay, of, throwError } from "rxjs"; + +import type { ErrorLike, OperationVariables } from "@apollo/client"; +import { + ApolloClient, + ApolloLink, + CombinedGraphQLErrors, + gql, + InMemoryCache, + NetworkStatus, +} from "@apollo/client"; +import { GraphQL17Alpha9Handler } from "@apollo/client/incremental"; +import { ApolloProvider, useSuspenseQuery } from "@apollo/client/react"; +import { + markAsStreaming, + mockDeferStreamGraphQL17Alpha9, + spyOnConsole, + wait, +} from "@apollo/client/testing/internal"; +import { offsetLimitPagination } from "@apollo/client/utilities"; +import { invariant } from "@apollo/client/utilities/invariant"; + +async function renderSuspenseHook< + TData, + TVariables extends OperationVariables, + Props = never, +>( + renderHook: ( + props: Props extends never ? undefined : Props + ) => useSuspenseQuery.Result, + options: Pick & { initialProps?: Props } +) { + function UseSuspenseQuery({ props }: { props: Props | undefined }) { + useTrackRenders({ name: "useSuspenseQuery" }); + replaceSnapshot(renderHook(props as any)); + + return null; + } + + function SuspenseFallback() { + useTrackRenders({ name: "SuspenseFallback" }); + + return null; + } + + function ErrorFallback() { + useTrackRenders({ name: "ErrorBoundary" }); + + return null; + } + + function App({ props }: { props: Props | undefined }) { + return ( + }> + replaceSnapshot({ error })} + > + + + + ); + } + + const { render, takeRender, replaceSnapshot, getCurrentRender } = + createRenderStream< + useSuspenseQuery.Result | { error: ErrorLike } + >(); + + const utils = await render(, options); + + function rerender(props: Props) { + return utils.rerender(); + } + + function getCurrentSnapshot() { + const { snapshot } = getCurrentRender(); + + invariant("data" in snapshot, "Snapshot is not a hook snapshot"); + + return snapshot; + } + + return { getCurrentSnapshot, takeRender, rerender }; +} + +test("suspends deferred queries until initial chunk loads then streams in data as it loads", async () => { + const query = gql` + query { + greeting { + message + ... 
on Greeting @defer { + recipient { + name + } + } + } + } + `; + + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDeferStreamGraphQL17Alpha9(); + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: httpLink, + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + () => useSuspenseQuery(query), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + enqueueInitialChunk({ + data: { greeting: { message: "Hello world", __typename: "Greeting" } }, + pending: [{ id: "0", path: ["greeting"] }], + hasNext: true, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + greeting: { message: "Hello world", __typename: "Greeting" }, + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + enqueueSubsequentChunk({ + incremental: [ + { + data: { + recipient: { name: "Alice", __typename: "Person" }, + __typename: "Greeting", + }, + id: "0", + }, + ], + completed: [{ id: "0" }], + hasNext: false, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Alice" }, + }, + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + await expect(takeRender).not.toRerender(); +}); + +test.each([ + "cache-first", + "network-only", + "no-cache", + "cache-and-network", +])( + 'suspends deferred queries until initial chunk loads then streams in data as it loads when using a "%s" fetch policy', + async (fetchPolicy) => { + const query = gql` + query { + greeting { + message + ... 
on Greeting @defer { + recipient { + name + } + } + } + } + `; + + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDeferStreamGraphQL17Alpha9(); + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: httpLink, + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + () => useSuspenseQuery(query, { fetchPolicy }), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + enqueueInitialChunk({ + data: { + greeting: { message: "Hello world", __typename: "Greeting" }, + }, + pending: [{ id: "0", path: ["greeting"] }], + hasNext: true, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + greeting: { message: "Hello world", __typename: "Greeting" }, + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + enqueueSubsequentChunk({ + incremental: [ + { + data: { + recipient: { name: "Alice", __typename: "Person" }, + __typename: "Greeting", + }, + id: "0", + }, + ], + completed: [{ id: "0" }], + hasNext: false, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Alice" }, + }, + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + await expect(takeRender).not.toRerender(); + } +); + +test('does not suspend deferred queries with data in the cache and using a "cache-first" fetch policy', async () => { + const query = gql` + query { + greeting { + message + ... on Greeting @defer { + recipient { + name + } + } + } + } + `; + + const cache = new InMemoryCache(); + + cache.writeQuery({ + query, + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Alice" }, + }, + }, + }); + + const client = new ApolloClient({ + cache, + link: ApolloLink.empty(), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + () => useSuspenseQuery(query, { fetchPolicy: "cache-first" }), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + greeting: { + message: "Hello world", + __typename: "Greeting", + recipient: { __typename: "Person", name: "Alice" }, + }, + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + + await expect(takeRender).not.toRerender(); +}); + +test('does not suspend deferred queries with partial data in the cache and using a "cache-first" fetch policy with `returnPartialData`', async () => { + const query = gql` + query { + greeting { + message + ... 
on Greeting @defer { + recipient { + name + } + } + } + } + `; + + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDeferStreamGraphQL17Alpha9(); + const cache = new InMemoryCache(); + + // We are intentionally writing partial data to the cache. Supress console + // warnings to avoid unnecessary noise in the test. + { + using _consoleSpy = spyOnConsole("error"); + cache.writeQuery({ + query, + data: { + greeting: { + __typename: "Greeting", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + }); + } + + const client = new ApolloClient({ + cache, + link: httpLink, + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + () => + useSuspenseQuery(query, { + fetchPolicy: "cache-first", + returnPartialData: true, + }), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + greeting: { + __typename: "Greeting", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + dataState: "partial", + networkStatus: NetworkStatus.loading, + error: undefined, + }); + } + + enqueueInitialChunk({ + data: { greeting: { message: "Hello world", __typename: "Greeting" } }, + pending: [{ id: "0", path: ["greeting"] }], + hasNext: true, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + enqueueSubsequentChunk({ + incremental: [ + { + data: { + __typename: "Greeting", + recipient: { name: "Alice", __typename: "Person" }, + }, + id: "0", + }, + ], + completed: [{ id: "0" }], + hasNext: false, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Alice" }, + }, + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + await expect(takeRender).not.toRerender(); +}); + +test('does not suspend deferred queries with data in the cache and using a "cache-and-network" fetch policy', async () => { + const query = gql` + query { + greeting { + message + ... 
on Greeting @defer { + recipient { + name + } + } + } + } + `; + + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDeferStreamGraphQL17Alpha9(); + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: httpLink, + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + client.writeQuery({ + query, + data: { + greeting: { + __typename: "Greeting", + message: "Hello cached", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + () => useSuspenseQuery(query, { fetchPolicy: "cache-and-network" }), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + greeting: { + message: "Hello cached", + __typename: "Greeting", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + dataState: "complete", + networkStatus: NetworkStatus.loading, + error: undefined, + }); + } + + enqueueInitialChunk({ + data: { greeting: { __typename: "Greeting", message: "Hello world" } }, + pending: [{ id: "0", path: ["greeting"] }], + hasNext: true, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + enqueueSubsequentChunk({ + incremental: [ + { + data: { + recipient: { name: "Alice", __typename: "Person" }, + __typename: "Greeting", + }, + id: "0", + }, + ], + completed: [{ id: "0" }], + hasNext: false, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Alice" }, + }, + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + await expect(takeRender).not.toRerender(); +}); + +test("suspends deferred queries with lists and properly patches results", async () => { + const query = gql` + query { + greetings { + message + ... 
on Greeting @defer { + recipient { + name + } + } + } + } + `; + + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDeferStreamGraphQL17Alpha9(); + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: httpLink, + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + () => useSuspenseQuery(query), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + enqueueInitialChunk({ + data: { + greetings: [ + { __typename: "Greeting", message: "Hello world" }, + { __typename: "Greeting", message: "Hello again" }, + ], + }, + pending: [ + { id: "0", path: ["greetings", 0] }, + { id: "1", path: ["greetings", 1] }, + ], + hasNext: true, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + greetings: [ + { __typename: "Greeting", message: "Hello world" }, + { __typename: "Greeting", message: "Hello again" }, + ], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + enqueueSubsequentChunk({ + incremental: [ + { + data: { + __typename: "Greeting", + recipient: { __typename: "Person", name: "Alice" }, + }, + id: "0", + }, + ], + completed: [{ id: "0" }], + hasNext: true, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + greetings: [ + { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Alice" }, + }, + { + __typename: "Greeting", + message: "Hello again", + }, + ], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + enqueueSubsequentChunk({ + incremental: [ + { + data: { + __typename: "Greeting", + recipient: { __typename: "Person", name: "Bob" }, + }, + id: "1", + }, + ], + completed: [{ id: "1" }], + hasNext: false, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + greetings: [ + { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Alice" }, + }, + { + __typename: "Greeting", + message: "Hello again", + recipient: { __typename: "Person", name: "Bob" }, + }, + ], + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + await expect(takeRender).not.toRerender(); +}); + +test("suspends queries with deferred fragments in lists and properly merges arrays", async () => { + const query = gql` + query DeferVariation { + allProducts { + delivery { + ...MyFragment @defer + } + sku + id + } + } + + fragment MyFragment on DeliveryEstimates { + estimatedDelivery + fastestDelivery + } + `; + + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDeferStreamGraphQL17Alpha9(); + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: httpLink, + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + 
() => useSuspenseQuery(query), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + enqueueInitialChunk({ + data: { + allProducts: [ + { + __typename: "Product", + delivery: { + __typename: "DeliveryEstimates", + }, + id: "apollo-federation", + sku: "federation", + }, + { + __typename: "Product", + delivery: { + __typename: "DeliveryEstimates", + }, + id: "apollo-studio", + sku: "studio", + }, + ], + }, + pending: [ + { id: "0", path: ["allProducts", 0, "delivery"] }, + { id: "1", path: ["allProducts", 1, "delivery"] }, + ], + hasNext: true, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + allProducts: [ + { + __typename: "Product", + delivery: { + __typename: "DeliveryEstimates", + }, + id: "apollo-federation", + sku: "federation", + }, + { + __typename: "Product", + delivery: { + __typename: "DeliveryEstimates", + }, + id: "apollo-studio", + sku: "studio", + }, + ], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + enqueueSubsequentChunk({ + hasNext: false, + incremental: [ + { + data: { + __typename: "DeliveryEstimates", + estimatedDelivery: "6/25/2021", + fastestDelivery: "6/24/2021", + }, + id: "0", + }, + { + data: { + __typename: "DeliveryEstimates", + estimatedDelivery: "6/25/2021", + fastestDelivery: "6/24/2021", + }, + id: "1", + }, + ], + completed: [{ id: "0" }, { id: "1" }], + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + allProducts: [ + { + __typename: "Product", + delivery: { + __typename: "DeliveryEstimates", + estimatedDelivery: "6/25/2021", + fastestDelivery: "6/24/2021", + }, + id: "apollo-federation", + sku: "federation", + }, + { + __typename: "Product", + delivery: { + __typename: "DeliveryEstimates", + estimatedDelivery: "6/25/2021", + fastestDelivery: "6/24/2021", + }, + id: "apollo-studio", + sku: "studio", + }, + ], + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + await expect(takeRender).not.toRerender(); +}); + +test("incrementally rerenders data returned by a `refetch` for a deferred query", async () => { + const query = gql` + query { + greeting { + message + ... 
@defer { + recipient { + name + } + } + } + } + `; + + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDeferStreamGraphQL17Alpha9(); + + const client = new ApolloClient({ + link: httpLink, + cache: new InMemoryCache(), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender, getCurrentSnapshot } = await renderSuspenseHook( + () => useSuspenseQuery(query), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + enqueueInitialChunk({ + data: { greeting: { __typename: "Greeting", message: "Hello world" } }, + pending: [{ id: "0", path: ["greeting"] }], + hasNext: true, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + greeting: { + __typename: "Greeting", + message: "Hello world", + }, + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + enqueueSubsequentChunk({ + incremental: [ + { + data: { + recipient: { name: "Alice", __typename: "Person" }, + }, + id: "0", + }, + ], + completed: [{ id: "0" }], + hasNext: false, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { + __typename: "Person", + name: "Alice", + }, + }, + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + const refetchPromise = getCurrentSnapshot().refetch(); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + enqueueInitialChunk({ + data: { + greeting: { + __typename: "Greeting", + message: "Goodbye", + }, + }, + pending: [{ id: "0", path: ["greeting"] }], + hasNext: true, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + greeting: { + __typename: "Greeting", + message: "Goodbye", + recipient: { + __typename: "Person", + name: "Alice", + }, + }, + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + enqueueSubsequentChunk({ + incremental: [ + { + data: { + recipient: { name: "Bob", __typename: "Person" }, + }, + id: "0", + }, + ], + completed: [{ id: "0" }], + hasNext: false, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + greeting: { + __typename: "Greeting", + message: "Goodbye", + recipient: { + __typename: "Person", + name: "Bob", + }, + }, + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + await expect(refetchPromise).resolves.toStrictEqualTyped({ + data: { + greeting: { + __typename: "Greeting", + message: "Goodbye", + recipient: { + __typename: "Person", + name: "Bob", + }, + }, + }, + }); +}); + +test("incrementally renders data returned after skipping a deferred query", async () => { + const query = gql` + query { + 
greeting { + message + ... @defer { + recipient { + name + } + } + } + } + `; + + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDeferStreamGraphQL17Alpha9(); + + const client = new ApolloClient({ + link: httpLink, + cache: new InMemoryCache(), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + using __disabledAct = disableActEnvironment(); + const { takeRender, rerender } = await renderSuspenseHook( + ({ skip }) => useSuspenseQuery(query, { skip }), + { + initialProps: { skip: true }, + wrapper: ({ children }) => ( + {children} + ), + } + ); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: undefined, + dataState: "empty", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + await rerender({ skip: false }); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + enqueueInitialChunk({ + data: { greeting: { __typename: "Greeting", message: "Hello world" } }, + pending: [{ id: "0", path: ["greeting"] }], + hasNext: true, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + greeting: { + __typename: "Greeting", + message: "Hello world", + }, + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + enqueueSubsequentChunk({ + incremental: [ + { + data: { + recipient: { name: "Alice", __typename: "Person" }, + }, + id: "0", + }, + ], + completed: [{ id: "0" }], + hasNext: false, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { + __typename: "Person", + name: "Alice", + }, + }, + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + await expect(takeRender).not.toRerender(); +}); + +// TODO: This test is a bit of a lie. `fetchMore` should incrementally +// rerender when using `@defer` but there is currently a bug in the core +// implementation that prevents updates until the final result is returned. +// This test reflects the behavior as it exists today, but will need +// to be updated once the core bug is fixed. +// +// NOTE: A duplicate it.failng test has been added right below this one with +// the expected behavior added in (i.e. the commented code in this test). Once +// the core bug is fixed, this test can be removed in favor of the other test. +// +// https://github.com/apollographql/apollo-client/issues/11034 +test("rerenders data returned by `fetchMore` for a deferred query", async () => { + const query = gql` + query ($offset: Int) { + greetings(offset: $offset) { + message + ... 
@defer { + recipient { + name + } + } + } + } + `; + + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDeferStreamGraphQL17Alpha9(); + + const cache = new InMemoryCache({ + typePolicies: { + Query: { + fields: { + greetings: offsetLimitPagination(), + }, + }, + }, + }); + + const client = new ApolloClient({ + link: httpLink, + cache, + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender, getCurrentSnapshot } = await renderSuspenseHook( + () => useSuspenseQuery(query, { variables: { offset: 0 } }), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + enqueueInitialChunk({ + data: { + greetings: [{ __typename: "Greeting", message: "Hello world" }], + }, + pending: [{ id: "0", path: ["greetings", 0] }], + hasNext: true, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + greetings: [{ __typename: "Greeting", message: "Hello world" }], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + enqueueSubsequentChunk({ + incremental: [ + { + data: { + recipient: { name: "Alice", __typename: "Person" }, + }, + id: "0", + }, + ], + completed: [{ id: "0" }], + hasNext: false, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + greetings: [ + { + __typename: "Greeting", + message: "Hello world", + recipient: { + __typename: "Person", + name: "Alice", + }, + }, + ], + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + const fetchMorePromise = getCurrentSnapshot().fetchMore({ + variables: { offset: 1 }, + }); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + enqueueInitialChunk({ + data: { + greetings: [ + { + __typename: "Greeting", + message: "Goodbye", + }, + ], + }, + pending: [{ id: "0", path: ["greetings", 0] }], + hasNext: true, + }); + + // TODO: Re-enable once the core bug is fixed + // { + // const { snapshot, renderedComponents } = await takeRender(); + // + // expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + // expect(snapshot).toStrictEqualTyped({ + // data: markAsStreaming({ + // greetings: [ + // { + // __typename: "Greeting", + // message: "Hello world", + // recipient: { + // __typename: "Person", + // name: "Alice", + // }, + // }, + // { + // __typename: "Greeting", + // message: "Goodbye", + // }, + // ], + // }), + // dataState: "streaming", + // networkStatus: NetworkStatus.streaming, + // error: undefined, + // }); + // } + + await wait(0); + enqueueSubsequentChunk({ + incremental: [ + { + data: { + recipient: { name: "Bob", __typename: "Person" }, + }, + id: "0", + }, + ], + completed: [{ id: "0" }], + hasNext: false, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + greetings: [ + { + __typename: "Greeting", + message: "Hello world", + recipient: { + __typename: "Person", + name: "Alice", + }, + }, + { 
+ __typename: "Greeting", + message: "Goodbye", + recipient: { + __typename: "Person", + name: "Bob", + }, + }, + ], + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + await expect(fetchMorePromise).resolves.toStrictEqualTyped({ + data: { + greetings: [ + { + __typename: "Greeting", + message: "Goodbye", + recipient: { + __typename: "Person", + name: "Bob", + }, + }, + ], + }, + }); + + await expect(takeRender).not.toRerender(); +}); + +// TODO: This is a duplicate of the test above, but with the expected behavior +// added (hence the `it.failing`). Remove the previous test once issue #11034 +// is fixed. +// +// https://github.com/apollographql/apollo-client/issues/11034 +it.failing( + "incrementally rerenders data returned by a `fetchMore` for a deferred query", + async () => { + const query = gql` + query ($offset: Int) { + greetings(offset: $offset) { + message + ... @defer { + recipient { + name + } + } + } + } + `; + + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDeferStreamGraphQL17Alpha9(); + + const cache = new InMemoryCache({ + typePolicies: { + Query: { + fields: { + greetings: offsetLimitPagination(), + }, + }, + }, + }); + + const client = new ApolloClient({ + link: httpLink, + cache, + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender, getCurrentSnapshot } = await renderSuspenseHook( + () => useSuspenseQuery(query, { variables: { offset: 0 } }), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + enqueueInitialChunk({ + data: { + greetings: [{ __typename: "Greeting", message: "Hello world" }], + }, + pending: [{ id: "0", path: ["greetings", 0] }], + hasNext: true, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + greetings: [{ __typename: "Greeting", message: "Hello world" }], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + enqueueSubsequentChunk({ + incremental: [ + { + data: { + recipient: { name: "Alice", __typename: "Person" }, + }, + id: "0", + }, + ], + completed: [{ id: "0" }], + hasNext: false, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + greetings: [ + { + __typename: "Greeting", + message: "Hello world", + recipient: { + __typename: "Person", + name: "Alice", + }, + }, + ], + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + const fetchMorePromise = getCurrentSnapshot().fetchMore({ + variables: { offset: 1 }, + }); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + enqueueInitialChunk({ + data: { + greetings: [ + { + __typename: "Greeting", + message: "Goodbye", + }, + ], + }, + pending: [{ id: "0", path: ["greetings", 0] }], + hasNext: true, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + greetings: [ + { + 
__typename: "Greeting", + message: "Hello world", + recipient: { + __typename: "Person", + name: "Alice", + }, + }, + { + __typename: "Greeting", + message: "Goodbye", + }, + ], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + enqueueSubsequentChunk({ + incremental: [ + { + data: { + recipient: { name: "Bob", __typename: "Person" }, + }, + id: "0", + }, + ], + completed: [{ id: "0" }], + hasNext: false, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + greetings: [ + { + __typename: "Greeting", + message: "Hello world", + recipient: { + __typename: "Person", + name: "Alice", + }, + }, + { + __typename: "Greeting", + message: "Goodbye", + recipient: { + __typename: "Person", + name: "Bob", + }, + }, + ], + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + await expect(fetchMorePromise).resolves.toStrictEqualTyped({ + data: { + greetings: [ + { + __typename: "Greeting", + message: "Goodbye", + recipient: { + __typename: "Person", + name: "Bob", + }, + }, + ], + }, + }); + + await expect(takeRender).not.toRerender(); + } +); + +test("throws network errors returned by deferred queries", async () => { + using _consoleSpy = spyOnConsole("error"); + + const query = gql` + query { + greeting { + message + ... on Greeting @defer { + recipient { + name + } + } + } + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: new ApolloLink(() => { + return throwError(() => new Error("Could not fetch")).pipe(delay(20)); + }), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + () => useSuspenseQuery(query), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["ErrorBoundary"]); + expect(snapshot).toStrictEqualTyped({ + error: new Error("Could not fetch"), + }); + } + + await expect(takeRender).not.toRerender(); +}); + +test("throws graphql errors returned by deferred queries", async () => { + using _consoleSpy = spyOnConsole("error"); + + const query = gql` + query { + greeting { + message + ... 
on Greeting @defer { + recipient { + name + } + } + } + } + `; + + const { httpLink, enqueueInitialChunk } = mockDeferStreamGraphQL17Alpha9(); + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: httpLink, + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + () => useSuspenseQuery(query), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + enqueueInitialChunk({ + errors: [{ message: "Could not fetch greeting" }], + hasNext: false, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["ErrorBoundary"]); + expect(snapshot).toStrictEqualTyped({ + error: new CombinedGraphQLErrors({ + data: null, + errors: [{ message: "Could not fetch greeting" }], + }), + }); + } + + await expect(takeRender).not.toRerender(); +}); + +test("throws errors returned by deferred queries that include partial data", async () => { + using _consoleSpy = spyOnConsole("error"); + + const query = gql` + query { + greeting { + message + ... on Greeting @defer { + recipient { + name + } + } + } + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: new ApolloLink(() => { + return of({ + data: { greeting: null }, + errors: [{ message: "Could not fetch greeting" }], + }).pipe(delay(20)); + }), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + () => useSuspenseQuery(query), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["ErrorBoundary"]); + expect(snapshot).toStrictEqualTyped({ + error: new CombinedGraphQLErrors({ + data: { greeting: null }, + errors: [{ message: "Could not fetch greeting" }], + }), + }); + } + + await expect(takeRender).not.toRerender(); +}); + +test("discards partial data and throws errors returned in incremental chunks", async () => { + using _consoleSpy = spyOnConsole("error"); + + const query = gql` + query { + hero { + name + heroFriends { + id + name + ... 
@defer { + homeWorld + } + } + } + } + `; + + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDeferStreamGraphQL17Alpha9(); + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: httpLink, + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + () => useSuspenseQuery(query), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + enqueueInitialChunk({ + data: { + hero: { + name: "R2-D2", + heroFriends: [ + { + id: "1000", + name: "Luke Skywalker", + }, + { + id: "1003", + name: "Leia Organa", + }, + ], + }, + }, + pending: [ + { id: "0", path: ["hero", "heroFriends", 0] }, + { id: "1", path: ["hero", "heroFriends", 1] }, + ], + hasNext: true, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + hero: { + heroFriends: [ + { + id: "1000", + name: "Luke Skywalker", + }, + { + id: "1003", + name: "Leia Organa", + }, + ], + name: "R2-D2", + }, + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + enqueueSubsequentChunk({ + incremental: [ + { + id: "0", + errors: [ + { + message: + "homeWorld for character with ID 1000 could not be fetched.", + path: ["hero", "heroFriends", 0, "homeWorld"], + }, + ], + data: { + homeWorld: null, + }, + }, + // This chunk is ignored since errorPolicy `none` throws away partial + // data + { + id: "1", + data: { + homeWorld: "Alderaan", + }, + }, + ], + completed: [{ id: "0" }, { id: "1" }], + hasNext: false, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["ErrorBoundary"]); + expect(snapshot).toStrictEqualTyped({ + error: new CombinedGraphQLErrors({ + data: { + hero: { + heroFriends: [ + { + id: "1000", + name: "Luke Skywalker", + homeWorld: null, + }, + { + id: "1003", + name: "Leia Organa", + homeWorld: "Alderaan", + }, + ], + name: "R2-D2", + }, + }, + errors: [ + { + message: + "homeWorld for character with ID 1000 could not be fetched.", + path: ["hero", "heroFriends", 0, "homeWorld"], + }, + ], + }), + }); + } + + await expect(takeRender).not.toRerender(); +}); + +test("adds partial data and does not throw errors returned in incremental chunks but returns them in `error` property with errorPolicy set to `all`", async () => { + const query = gql` + query { + hero { + name + heroFriends { + id + name + ... 
@defer { + homeWorld + } + } + } + } + `; + + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDeferStreamGraphQL17Alpha9(); + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: httpLink, + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + () => useSuspenseQuery(query, { errorPolicy: "all" }), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + enqueueInitialChunk({ + data: { + hero: { + name: "R2-D2", + heroFriends: [ + { + id: "1000", + name: "Luke Skywalker", + }, + { + id: "1003", + name: "Leia Organa", + }, + ], + }, + }, + pending: [ + { id: "0", path: ["hero", "heroFriends", 0] }, + { id: "1", path: ["hero", "heroFriends", 1] }, + ], + hasNext: true, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + hero: { + heroFriends: [ + { + id: "1000", + name: "Luke Skywalker", + }, + { + id: "1003", + name: "Leia Organa", + }, + ], + name: "R2-D2", + }, + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + enqueueSubsequentChunk({ + incremental: [ + { + id: "0", + errors: [ + { + message: + "homeWorld for character with ID 1000 could not be fetched.", + path: ["hero", "heroFriends", 0, "homeWorld"], + }, + ], + data: { + homeWorld: null, + }, + }, + // Unlike the default (errorPolicy = `none`), this data will be + // added to the final result + { + id: "1", + data: { + homeWorld: "Alderaan", + }, + }, + ], + completed: [{ id: "0" }, { id: "1" }], + hasNext: false, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + hero: { + heroFriends: [ + { + id: "1000", + name: "Luke Skywalker", + homeWorld: null, + }, + { + id: "1003", + name: "Leia Organa", + homeWorld: "Alderaan", + }, + ], + name: "R2-D2", + }, + }, + dataState: "complete", + networkStatus: NetworkStatus.error, + error: new CombinedGraphQLErrors({ + data: { + hero: { + heroFriends: [ + { + id: "1000", + name: "Luke Skywalker", + homeWorld: null, + }, + { + id: "1003", + name: "Leia Organa", + homeWorld: "Alderaan", + }, + ], + name: "R2-D2", + }, + }, + errors: [ + { + message: + "homeWorld for character with ID 1000 could not be fetched.", + path: ["hero", "heroFriends", 0, "homeWorld"], + }, + ], + }), + }); + } + + await expect(takeRender).not.toRerender(); +}); + +test("adds partial data and discards errors returned in incremental chunks with errorPolicy set to `ignore`", async () => { + const query = gql` + query { + hero { + name + heroFriends { + id + name + ... 
@defer { + homeWorld + } + } + } + } + `; + + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDeferStreamGraphQL17Alpha9(); + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: httpLink, + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + () => useSuspenseQuery(query, { errorPolicy: "ignore" }), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + enqueueInitialChunk({ + data: { + hero: { + name: "R2-D2", + heroFriends: [ + { + id: "1000", + name: "Luke Skywalker", + }, + { + id: "1003", + name: "Leia Organa", + }, + ], + }, + }, + pending: [ + { id: "0", path: ["hero", "heroFriends", 0] }, + { id: "1", path: ["hero", "heroFriends", 1] }, + ], + hasNext: true, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + hero: { + heroFriends: [ + { + id: "1000", + name: "Luke Skywalker", + }, + { + id: "1003", + name: "Leia Organa", + }, + ], + name: "R2-D2", + }, + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + enqueueSubsequentChunk({ + incremental: [ + { + id: "0", + errors: [ + { + message: + "homeWorld for character with ID 1000 could not be fetched.", + path: ["hero", "heroFriends", 0, "homeWorld"], + }, + ], + data: { + homeWorld: null, + }, + }, + { + id: "1", + data: { + homeWorld: "Alderaan", + }, + }, + ], + completed: [{ id: "0" }, { id: "1" }], + hasNext: false, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + hero: { + heroFriends: [ + { + id: "1000", + name: "Luke Skywalker", + homeWorld: null, + }, + { + id: "1003", + name: "Leia Organa", + homeWorld: "Alderaan", + }, + ], + name: "R2-D2", + }, + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + await expect(takeRender).not.toRerender(); +}); + +test("can refetch and respond to cache updates after encountering an error in an incremental chunk for a deferred query when `errorPolicy` is `all`", async () => { + const query = gql` + query { + hero { + name + heroFriends { + id + name + ... 
@defer { + homeWorld + } + } + } + } + `; + + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDeferStreamGraphQL17Alpha9(); + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: httpLink, + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender, getCurrentSnapshot } = await renderSuspenseHook( + () => useSuspenseQuery(query, { errorPolicy: "all" }), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + enqueueInitialChunk({ + data: { + hero: { + name: "R2-D2", + heroFriends: [ + { id: "1000", name: "Luke Skywalker" }, + { id: "1003", name: "Leia Organa" }, + ], + }, + }, + pending: [ + { id: "0", path: ["hero", "heroFriends", 0] }, + { id: "1", path: ["hero", "heroFriends", 1] }, + ], + hasNext: true, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + hero: { + heroFriends: [ + { id: "1000", name: "Luke Skywalker" }, + { id: "1003", name: "Leia Organa" }, + ], + name: "R2-D2", + }, + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + enqueueSubsequentChunk({ + incremental: [ + { + id: "0", + errors: [ + { + message: + "homeWorld for character with ID 1000 could not be fetched.", + path: ["hero", "heroFriends", 0, "homeWorld"], + }, + ], + data: { + homeWorld: null, + }, + }, + { + id: "1", + data: { + homeWorld: "Alderaan", + }, + }, + ], + completed: [{ id: "0" }, { id: "1" }], + hasNext: false, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + hero: { + heroFriends: [ + { id: "1000", name: "Luke Skywalker", homeWorld: null }, + { id: "1003", name: "Leia Organa", homeWorld: "Alderaan" }, + ], + name: "R2-D2", + }, + }, + dataState: "complete", + networkStatus: NetworkStatus.error, + error: new CombinedGraphQLErrors({ + data: { + hero: { + heroFriends: [ + { id: "1000", name: "Luke Skywalker", homeWorld: null }, + { id: "1003", name: "Leia Organa", homeWorld: "Alderaan" }, + ], + name: "R2-D2", + }, + }, + errors: [ + { + message: + "homeWorld for character with ID 1000 could not be fetched.", + path: ["hero", "heroFriends", 0, "homeWorld"], + }, + ], + }), + }); + } + + const refetchPromise = getCurrentSnapshot().refetch(); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + enqueueInitialChunk({ + data: { + hero: { + name: "R2-D2", + heroFriends: [ + { id: "1000", name: "Luke Skywalker" }, + { id: "1003", name: "Leia Organa" }, + ], + }, + }, + pending: [ + { id: "0", path: ["hero", "heroFriends", 0] }, + { id: "1", path: ["hero", "heroFriends", 1] }, + ], + hasNext: true, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + hero: { + heroFriends: [ + { id: "1000", name: "Luke Skywalker", homeWorld: null }, + { id: "1003", name: "Leia Organa", homeWorld: "Alderaan" }, + ], + name: "R2-D2", + }, + }), + dataState: "streaming", + networkStatus: 
NetworkStatus.streaming, + error: undefined, + }); + } + + enqueueSubsequentChunk({ + incremental: [ + { + id: "0", + data: { + homeWorld: "Alderaan", + }, + }, + { + id: "1", + data: { + homeWorld: "Alderaan", + }, + }, + ], + completed: [{ id: "0" }, { id: "1" }], + hasNext: false, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + hero: { + heroFriends: [ + { id: "1000", name: "Luke Skywalker", homeWorld: "Alderaan" }, + { id: "1003", name: "Leia Organa", homeWorld: "Alderaan" }, + ], + name: "R2-D2", + }, + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + await expect(refetchPromise).resolves.toStrictEqualTyped({ + data: { + hero: { + heroFriends: [ + { id: "1000", name: "Luke Skywalker", homeWorld: "Alderaan" }, + { id: "1003", name: "Leia Organa", homeWorld: "Alderaan" }, + ], + name: "R2-D2", + }, + }, + }); + + client.cache.updateQuery({ query }, (data) => ({ + hero: { + ...data.hero, + name: "C3PO", + }, + })); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + hero: { + heroFriends: [ + { id: "1000", name: "Luke Skywalker", homeWorld: "Alderaan" }, + { id: "1003", name: "Leia Organa", homeWorld: "Alderaan" }, + ], + name: "C3PO", + }, + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + await expect(takeRender).not.toRerender(); +}); From 61736c056a21b61af4af84d962242f64e0f09a21 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 22:36:41 -0600 Subject: [PATCH 059/254] Copy useBackgroundQuery defer tests to own file --- .../__tests__/useBackgroundQuery.test.tsx | 297 -------------- .../useBackgroundQuery/defer20220824.test.tsx | 361 ++++++++++++++++++ 2 files changed, 361 insertions(+), 297 deletions(-) create mode 100644 src/react/hooks/__tests__/useBackgroundQuery/defer20220824.test.tsx diff --git a/src/react/hooks/__tests__/useBackgroundQuery.test.tsx b/src/react/hooks/__tests__/useBackgroundQuery.test.tsx index d0bce5acec0..d1c0db9e893 100644 --- a/src/react/hooks/__tests__/useBackgroundQuery.test.tsx +++ b/src/react/hooks/__tests__/useBackgroundQuery.test.tsx @@ -30,7 +30,6 @@ import { NetworkStatus, } from "@apollo/client"; import { InMemoryCache } from "@apollo/client/cache"; -import { Defer20220824Handler } from "@apollo/client/incremental"; import type { QueryRef } from "@apollo/client/react"; import { ApolloProvider, @@ -1391,150 +1390,6 @@ it("works with startTransition to change variables", async () => { } }); -it('does not suspend deferred queries with data in the cache and using a "cache-and-network" fetch policy', async () => { - interface Data { - greeting: { - __typename: string; - message: string; - recipient: { name: string; __typename: string }; - }; - } - - const query: TypedDocumentNode = gql` - query { - greeting { - message - ... 
on Greeting @defer { - recipient { - name - } - } - } - } - `; - - const link = new MockSubscriptionLink(); - const cache = new InMemoryCache(); - cache.writeQuery({ - query, - data: { - greeting: { - __typename: "Greeting", - message: "Hello cached", - recipient: { __typename: "Person", name: "Cached Alice" }, - }, - }, - }); - const client = new ApolloClient({ - cache, - link, - incrementalHandler: new Defer20220824Handler(), - }); - - const renderStream = createDefaultProfiler(); - - const { SuspenseFallback, ReadQueryHook } = - createDefaultTrackedComponents(renderStream); - - function App() { - useTrackRenders(); - const [queryRef] = useBackgroundQuery(query, { - fetchPolicy: "cache-and-network", - }); - - return ( - }> - - - ); - } - - using _disabledAct = disableActEnvironment(); - await renderStream.render(, { wrapper: createClientWrapper(client) }); - - { - const { snapshot, renderedComponents } = await renderStream.takeRender(); - - expect(renderedComponents).toStrictEqual([App, ReadQueryHook]); - expect(snapshot.result).toStrictEqualTyped({ - data: { - greeting: { - __typename: "Greeting", - message: "Hello cached", - recipient: { __typename: "Person", name: "Cached Alice" }, - }, - }, - dataState: "complete", - error: undefined, - networkStatus: NetworkStatus.loading, - }); - } - - link.simulateResult({ - result: { - data: { - greeting: { __typename: "Greeting", message: "Hello world" }, - }, - hasNext: true, - }, - }); - - { - const { snapshot, renderedComponents } = await renderStream.takeRender(); - - expect(renderedComponents).toStrictEqual([ReadQueryHook]); - expect(snapshot.result).toStrictEqualTyped({ - data: { - greeting: { - __typename: "Greeting", - message: "Hello world", - recipient: { __typename: "Person", name: "Cached Alice" }, - }, - }, - dataState: "streaming", - error: undefined, - networkStatus: NetworkStatus.streaming, - }); - } - - link.simulateResult( - { - result: { - incremental: [ - { - data: { - recipient: { name: "Alice", __typename: "Person" }, - __typename: "Greeting", - }, - path: ["greeting"], - }, - ], - hasNext: false, - }, - }, - true - ); - - { - const { snapshot, renderedComponents } = await renderStream.takeRender(); - - expect(renderedComponents).toStrictEqual([ReadQueryHook]); - expect(snapshot.result).toStrictEqualTyped({ - data: { - greeting: { - __typename: "Greeting", - message: "Hello world", - recipient: { __typename: "Person", name: "Alice" }, - }, - }, - dataState: "complete", - error: undefined, - networkStatus: NetworkStatus.ready, - }); - } - - await expect(renderStream).not.toRerender({ timeout: 50 }); -}); it("reacts to cache updates", async () => { const { query, mocks } = setupSimpleCase(); @@ -3816,158 +3671,6 @@ it('suspends and does not use partial data when changing variables and using a " await expect(renderStream).not.toRerender({ timeout: 50 }); }); -it('does not suspend deferred queries with partial data in the cache and using a "cache-first" fetch policy with `returnPartialData`', async () => { - interface QueryData { - greeting: { - __typename: string; - message?: string; - recipient?: { - __typename: string; - name: string; - }; - }; - } - - const query: TypedDocumentNode = gql` - query { - greeting { - message - ... on Greeting @defer { - recipient { - name - } - } - } - } - `; - - const link = new MockSubscriptionLink(); - const cache = new InMemoryCache(); - - // We are intentionally writing partial data to the cache. Supress console - // warnings to avoid unnecessary noise in the test. 
- { - using _consoleSpy = spyOnConsole("error"); - cache.writeQuery({ - query, - data: { - greeting: { - __typename: "Greeting", - recipient: { __typename: "Person", name: "Cached Alice" }, - }, - }, - }); - } - - const client = new ApolloClient({ - link, - cache, - incrementalHandler: new Defer20220824Handler(), - }); - - const renderStream = createDefaultProfiler>(); - const { SuspenseFallback, ReadQueryHook } = - createDefaultTrackedComponents(renderStream); - - function App() { - useTrackRenders(); - const [queryRef] = useBackgroundQuery(query, { - fetchPolicy: "cache-first", - returnPartialData: true, - }); - - return ( - }> - - - ); - } - - using _disabledAct = disableActEnvironment(); - await renderStream.render(, { wrapper: createClientWrapper(client) }); - - { - const { snapshot, renderedComponents } = await renderStream.takeRender(); - - expect(renderedComponents).toStrictEqual([App, ReadQueryHook]); - expect(snapshot.result).toStrictEqualTyped({ - data: { - greeting: { - __typename: "Greeting", - recipient: { __typename: "Person", name: "Cached Alice" }, - }, - }, - dataState: "partial", - error: undefined, - networkStatus: NetworkStatus.loading, - }); - } - - link.simulateResult({ - result: { - data: { - greeting: { message: "Hello world", __typename: "Greeting" }, - }, - hasNext: true, - }, - }); - - { - const { snapshot, renderedComponents } = await renderStream.takeRender(); - - expect(renderedComponents).toStrictEqual([ReadQueryHook]); - expect(snapshot.result).toStrictEqualTyped({ - data: { - greeting: { - __typename: "Greeting", - message: "Hello world", - recipient: { __typename: "Person", name: "Cached Alice" }, - }, - }, - dataState: "streaming", - error: undefined, - networkStatus: NetworkStatus.streaming, - }); - } - - link.simulateResult( - { - result: { - incremental: [ - { - data: { - __typename: "Greeting", - recipient: { name: "Alice", __typename: "Person" }, - }, - path: ["greeting"], - }, - ], - hasNext: false, - }, - }, - true - ); - - { - const { snapshot, renderedComponents } = await renderStream.takeRender(); - - expect(renderedComponents).toStrictEqual([ReadQueryHook]); - expect(snapshot.result).toStrictEqualTyped({ - data: { - greeting: { - __typename: "Greeting", - message: "Hello world", - recipient: { __typename: "Person", name: "Alice" }, - }, - }, - dataState: "complete", - error: undefined, - networkStatus: NetworkStatus.ready, - }); - } - - await expect(renderStream).not.toRerender({ timeout: 50 }); -}); it.each([ "cache-first", diff --git a/src/react/hooks/__tests__/useBackgroundQuery/defer20220824.test.tsx b/src/react/hooks/__tests__/useBackgroundQuery/defer20220824.test.tsx new file mode 100644 index 00000000000..f2f0b875029 --- /dev/null +++ b/src/react/hooks/__tests__/useBackgroundQuery/defer20220824.test.tsx @@ -0,0 +1,361 @@ +import type { RenderStream } from "@testing-library/react-render-stream"; +import { + createRenderStream, + disableActEnvironment, + useTrackRenders, +} from "@testing-library/react-render-stream"; +import React, { Suspense } from "react"; + +import type { DataState, TypedDocumentNode } from "@apollo/client"; +import { ApolloClient, gql, NetworkStatus } from "@apollo/client"; +import { InMemoryCache } from "@apollo/client/cache"; +import { Defer20220824Handler } from "@apollo/client/incremental"; +import type { QueryRef } from "@apollo/client/react"; +import { useBackgroundQuery, useReadQuery } from "@apollo/client/react"; +import { MockSubscriptionLink } from "@apollo/client/testing"; +import { + createClientWrapper, 
+ spyOnConsole, +} from "@apollo/client/testing/internal"; +import type { DeepPartial } from "@apollo/client/utilities"; + +function createDefaultTrackedComponents< + Snapshot extends { + result: useReadQuery.Result | null; + }, + TData = Snapshot["result"] extends useReadQuery.Result | null ? + TData + : unknown, + TStates extends DataState["dataState"] = Snapshot["result"] extends ( + useReadQuery.Result | null + ) ? + TStates + : "complete" | "streaming", +>(renderStream: RenderStream) { + function SuspenseFallback() { + useTrackRenders(); + return
<div>Loading</div>
; + } + + function ReadQueryHook({ + queryRef, + }: { + queryRef: QueryRef; + }) { + useTrackRenders(); + renderStream.mergeSnapshot({ + result: useReadQuery(queryRef), + } as unknown as Partial); + + return null; + } + + return { SuspenseFallback, ReadQueryHook }; +} + +function createDefaultProfiler() { + return createRenderStream({ + initialSnapshot: { + result: null as useReadQuery.Result | null, + }, + }); +} + +test('does not suspend deferred queries with data in the cache and using a "cache-and-network" fetch policy', async () => { + interface Data { + greeting: { + __typename: string; + message: string; + recipient: { name: string; __typename: string }; + }; + } + + const query: TypedDocumentNode = gql` + query { + greeting { + message + ... on Greeting @defer { + recipient { + name + } + } + } + } + `; + + const link = new MockSubscriptionLink(); + const cache = new InMemoryCache(); + cache.writeQuery({ + query, + data: { + greeting: { + __typename: "Greeting", + message: "Hello cached", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + }); + const client = new ApolloClient({ + cache, + link, + incrementalHandler: new Defer20220824Handler(), + }); + + const renderStream = createDefaultProfiler(); + + const { SuspenseFallback, ReadQueryHook } = + createDefaultTrackedComponents(renderStream); + + function App() { + useTrackRenders(); + const [queryRef] = useBackgroundQuery(query, { + fetchPolicy: "cache-and-network", + }); + + return ( + }> + + + ); + } + + using _disabledAct = disableActEnvironment(); + await renderStream.render(, { wrapper: createClientWrapper(client) }); + + { + const { snapshot, renderedComponents } = await renderStream.takeRender(); + + expect(renderedComponents).toStrictEqual([App, ReadQueryHook]); + expect(snapshot.result).toStrictEqualTyped({ + data: { + greeting: { + __typename: "Greeting", + message: "Hello cached", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + dataState: "complete", + error: undefined, + networkStatus: NetworkStatus.loading, + }); + } + + link.simulateResult({ + result: { + data: { + greeting: { __typename: "Greeting", message: "Hello world" }, + }, + hasNext: true, + }, + }); + + { + const { snapshot, renderedComponents } = await renderStream.takeRender(); + + expect(renderedComponents).toStrictEqual([ReadQueryHook]); + expect(snapshot.result).toStrictEqualTyped({ + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + dataState: "streaming", + error: undefined, + networkStatus: NetworkStatus.streaming, + }); + } + + link.simulateResult( + { + result: { + incremental: [ + { + data: { + recipient: { name: "Alice", __typename: "Person" }, + __typename: "Greeting", + }, + path: ["greeting"], + }, + ], + hasNext: false, + }, + }, + true + ); + + { + const { snapshot, renderedComponents } = await renderStream.takeRender(); + + expect(renderedComponents).toStrictEqual([ReadQueryHook]); + expect(snapshot.result).toStrictEqualTyped({ + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Alice" }, + }, + }, + dataState: "complete", + error: undefined, + networkStatus: NetworkStatus.ready, + }); + } + + await expect(renderStream).not.toRerender({ timeout: 50 }); +}); + +test('does not suspend deferred queries with partial data in the cache and using a "cache-first" fetch policy with `returnPartialData`', async () => { + interface QueryData { 
+ greeting: { + __typename: string; + message?: string; + recipient?: { + __typename: string; + name: string; + }; + }; + } + + const query: TypedDocumentNode = gql` + query { + greeting { + message + ... on Greeting @defer { + recipient { + name + } + } + } + } + `; + + const link = new MockSubscriptionLink(); + const cache = new InMemoryCache(); + + // We are intentionally writing partial data to the cache. Supress console + // warnings to avoid unnecessary noise in the test. + { + using _consoleSpy = spyOnConsole("error"); + cache.writeQuery({ + query, + data: { + greeting: { + __typename: "Greeting", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + }); + } + + const client = new ApolloClient({ + link, + cache, + incrementalHandler: new Defer20220824Handler(), + }); + + const renderStream = createDefaultProfiler>(); + const { SuspenseFallback, ReadQueryHook } = + createDefaultTrackedComponents(renderStream); + + function App() { + useTrackRenders(); + const [queryRef] = useBackgroundQuery(query, { + fetchPolicy: "cache-first", + returnPartialData: true, + }); + + return ( + }> + + + ); + } + + using _disabledAct = disableActEnvironment(); + await renderStream.render(, { wrapper: createClientWrapper(client) }); + + { + const { snapshot, renderedComponents } = await renderStream.takeRender(); + + expect(renderedComponents).toStrictEqual([App, ReadQueryHook]); + expect(snapshot.result).toStrictEqualTyped({ + data: { + greeting: { + __typename: "Greeting", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + dataState: "partial", + error: undefined, + networkStatus: NetworkStatus.loading, + }); + } + + link.simulateResult({ + result: { + data: { + greeting: { message: "Hello world", __typename: "Greeting" }, + }, + hasNext: true, + }, + }); + + { + const { snapshot, renderedComponents } = await renderStream.takeRender(); + + expect(renderedComponents).toStrictEqual([ReadQueryHook]); + expect(snapshot.result).toStrictEqualTyped({ + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + dataState: "streaming", + error: undefined, + networkStatus: NetworkStatus.streaming, + }); + } + + link.simulateResult( + { + result: { + incremental: [ + { + data: { + __typename: "Greeting", + recipient: { name: "Alice", __typename: "Person" }, + }, + path: ["greeting"], + }, + ], + hasNext: false, + }, + }, + true + ); + + { + const { snapshot, renderedComponents } = await renderStream.takeRender(); + + expect(renderedComponents).toStrictEqual([ReadQueryHook]); + expect(snapshot.result).toStrictEqualTyped({ + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Alice" }, + }, + }, + dataState: "complete", + error: undefined, + networkStatus: NetworkStatus.ready, + }); + } + + await expect(renderStream).not.toRerender({ timeout: 50 }); +}); + From aa17427a547356bed7dd8fe5888e9e6d78512c64 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 22:40:14 -0600 Subject: [PATCH 060/254] Use the new helpers --- .../useBackgroundQuery/defer20220824.test.tsx | 88 ++++++++----------- 1 file changed, 39 insertions(+), 49 deletions(-) diff --git a/src/react/hooks/__tests__/useBackgroundQuery/defer20220824.test.tsx b/src/react/hooks/__tests__/useBackgroundQuery/defer20220824.test.tsx index f2f0b875029..182866ea9af 100644 --- a/src/react/hooks/__tests__/useBackgroundQuery/defer20220824.test.tsx +++ 
b/src/react/hooks/__tests__/useBackgroundQuery/defer20220824.test.tsx @@ -12,9 +12,9 @@ import { InMemoryCache } from "@apollo/client/cache"; import { Defer20220824Handler } from "@apollo/client/incremental"; import type { QueryRef } from "@apollo/client/react"; import { useBackgroundQuery, useReadQuery } from "@apollo/client/react"; -import { MockSubscriptionLink } from "@apollo/client/testing"; import { createClientWrapper, + mockDefer20220824, spyOnConsole, } from "@apollo/client/testing/internal"; import type { DeepPartial } from "@apollo/client/utilities"; @@ -83,7 +83,9 @@ test('does not suspend deferred queries with data in the cache and using a "cach } `; - const link = new MockSubscriptionLink(); + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDefer20220824(); + const cache = new InMemoryCache(); cache.writeQuery({ query, @@ -97,7 +99,7 @@ test('does not suspend deferred queries with data in the cache and using a "cach }); const client = new ApolloClient({ cache, - link, + link: httpLink, incrementalHandler: new Defer20220824Handler(), }); @@ -140,13 +142,11 @@ test('does not suspend deferred queries with data in the cache and using a "cach }); } - link.simulateResult({ - result: { - data: { - greeting: { __typename: "Greeting", message: "Hello world" }, - }, - hasNext: true, + enqueueInitialChunk({ + data: { + greeting: { __typename: "Greeting", message: "Hello world" }, }, + hasNext: true, }); { @@ -167,23 +167,18 @@ test('does not suspend deferred queries with data in the cache and using a "cach }); } - link.simulateResult( - { - result: { - incremental: [ - { - data: { - recipient: { name: "Alice", __typename: "Person" }, - __typename: "Greeting", - }, - path: ["greeting"], - }, - ], - hasNext: false, + enqueueSubsequentChunk({ + incremental: [ + { + data: { + recipient: { name: "Alice", __typename: "Person" }, + __typename: "Greeting", + }, + path: ["greeting"], }, - }, - true - ); + ], + hasNext: false, + }); { const { snapshot, renderedComponents } = await renderStream.takeRender(); @@ -231,7 +226,9 @@ test('does not suspend deferred queries with partial data in the cache and using } `; - const link = new MockSubscriptionLink(); + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDefer20220824(); + const cache = new InMemoryCache(); // We are intentionally writing partial data to the cache. 
Supress console @@ -250,7 +247,7 @@ test('does not suspend deferred queries with partial data in the cache and using } const client = new ApolloClient({ - link, + link: httpLink, cache, incrementalHandler: new Defer20220824Handler(), }); @@ -293,13 +290,11 @@ test('does not suspend deferred queries with partial data in the cache and using }); } - link.simulateResult({ - result: { - data: { - greeting: { message: "Hello world", __typename: "Greeting" }, - }, - hasNext: true, + enqueueInitialChunk({ + data: { + greeting: { message: "Hello world", __typename: "Greeting" }, }, + hasNext: true, }); { @@ -320,23 +315,18 @@ test('does not suspend deferred queries with partial data in the cache and using }); } - link.simulateResult( - { - result: { - incremental: [ - { - data: { - __typename: "Greeting", - recipient: { name: "Alice", __typename: "Person" }, - }, - path: ["greeting"], - }, - ], - hasNext: false, + enqueueSubsequentChunk({ + incremental: [ + { + data: { + __typename: "Greeting", + recipient: { name: "Alice", __typename: "Person" }, + }, + path: ["greeting"], }, - }, - true - ); + ], + hasNext: false, + }); { const { snapshot, renderedComponents } = await renderStream.takeRender(); From da0113cf7240a99b0bf88feff42f33b17eb375a5 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 22:59:41 -0600 Subject: [PATCH 061/254] Use a pattern similar to useSuspenseQuery tests in useBackgroundQuery defer tests --- .../useBackgroundQuery/defer20220824.test.tsx | 196 +++++++++--------- 1 file changed, 101 insertions(+), 95 deletions(-) diff --git a/src/react/hooks/__tests__/useBackgroundQuery/defer20220824.test.tsx b/src/react/hooks/__tests__/useBackgroundQuery/defer20220824.test.tsx index 182866ea9af..9ce0c721105 100644 --- a/src/react/hooks/__tests__/useBackgroundQuery/defer20220824.test.tsx +++ b/src/react/hooks/__tests__/useBackgroundQuery/defer20220824.test.tsx @@ -1,12 +1,18 @@ -import type { RenderStream } from "@testing-library/react-render-stream"; +import type { RenderOptions } from "@testing-library/react"; import { createRenderStream, disableActEnvironment, useTrackRenders, } from "@testing-library/react-render-stream"; import React, { Suspense } from "react"; - -import type { DataState, TypedDocumentNode } from "@apollo/client"; +import { ErrorBoundary } from "react-error-boundary"; + +import type { + DataState, + ErrorLike, + OperationVariables, + TypedDocumentNode, +} from "@apollo/client"; import { ApolloClient, gql, NetworkStatus } from "@apollo/client"; import { InMemoryCache } from "@apollo/client/cache"; import { Defer20220824Handler } from "@apollo/client/incremental"; @@ -17,48 +23,69 @@ import { mockDefer20220824, spyOnConsole, } from "@apollo/client/testing/internal"; -import type { DeepPartial } from "@apollo/client/utilities"; - -function createDefaultTrackedComponents< - Snapshot extends { - result: useReadQuery.Result | null; - }, - TData = Snapshot["result"] extends useReadQuery.Result | null ? - TData - : unknown, - TStates extends DataState["dataState"] = Snapshot["result"] extends ( - useReadQuery.Result | null + +async function renderSuspenseHook< + TData, + TVariables extends OperationVariables, + TQueryRef extends QueryRef, + TStates extends DataState["dataState"] = TQueryRef extends ( + QueryRef ) ? - TStates - : "complete" | "streaming", ->(renderStream: RenderStream) { + States + : never, + Props = never, +>( + renderHook: ( + props: Props extends never ? 
undefined : Props + ) => [TQueryRef, useBackgroundQuery.Result], + options: Pick & { initialProps?: Props } +) { + function UseReadQuery({ queryRef }: { queryRef: QueryRef }) { + useTrackRenders({ name: "useReadQuery" }); + replaceSnapshot(useReadQuery(queryRef) as any); + + return null; + } + function SuspenseFallback() { - useTrackRenders(); - return
<div>Loading</div>
; + useTrackRenders({ name: "SuspenseFallback" }); + + return null; } - function ReadQueryHook({ - queryRef, - }: { - queryRef: QueryRef; - }) { - useTrackRenders(); - renderStream.mergeSnapshot({ - result: useReadQuery(queryRef), - } as unknown as Partial); + function ErrorFallback() { + useTrackRenders({ name: "ErrorBoundary" }); return null; } - return { SuspenseFallback, ReadQueryHook }; -} + function App({ props }: { props: Props | undefined }) { + useTrackRenders({ name: "useBackgroundQuery" }); + const [queryRef] = renderHook(props as any); -function createDefaultProfiler() { - return createRenderStream({ - initialSnapshot: { - result: null as useReadQuery.Result | null, - }, - }); + return ( + }> + replaceSnapshot({ error })} + > + + + + ); + } + + const { render, takeRender, replaceSnapshot } = createRenderStream< + useReadQuery.Result | { error: ErrorLike } + >(); + + const utils = await render(, options); + + function rerender(props: Props) { + return utils.rerender(); + } + + return { takeRender, rerender }; } test('does not suspend deferred queries with data in the cache and using a "cache-and-network" fetch policy', async () => { @@ -103,32 +130,20 @@ test('does not suspend deferred queries with data in the cache and using a "cach incrementalHandler: new Defer20220824Handler(), }); - const renderStream = createDefaultProfiler(); - - const { SuspenseFallback, ReadQueryHook } = - createDefaultTrackedComponents(renderStream); - - function App() { - useTrackRenders(); - const [queryRef] = useBackgroundQuery(query, { - fetchPolicy: "cache-and-network", - }); - - return ( - }> - - - ); - } - using _disabledAct = disableActEnvironment(); - await renderStream.render(, { wrapper: createClientWrapper(client) }); + const { takeRender } = await renderSuspenseHook( + () => useBackgroundQuery(query, { fetchPolicy: "cache-and-network" }), + { wrapper: createClientWrapper(client) } + ); { - const { snapshot, renderedComponents } = await renderStream.takeRender(); + const { snapshot, renderedComponents } = await takeRender(); - expect(renderedComponents).toStrictEqual([App, ReadQueryHook]); - expect(snapshot.result).toStrictEqualTyped({ + expect(renderedComponents).toStrictEqual([ + "useBackgroundQuery", + "useReadQuery", + ]); + expect(snapshot).toStrictEqualTyped({ data: { greeting: { __typename: "Greeting", @@ -150,10 +165,10 @@ test('does not suspend deferred queries with data in the cache and using a "cach }); { - const { snapshot, renderedComponents } = await renderStream.takeRender(); + const { snapshot, renderedComponents } = await takeRender(); - expect(renderedComponents).toStrictEqual([ReadQueryHook]); - expect(snapshot.result).toStrictEqualTyped({ + expect(renderedComponents).toStrictEqual(["useReadQuery"]); + expect(snapshot).toStrictEqualTyped({ data: { greeting: { __typename: "Greeting", @@ -181,10 +196,10 @@ test('does not suspend deferred queries with data in the cache and using a "cach }); { - const { snapshot, renderedComponents } = await renderStream.takeRender(); + const { snapshot, renderedComponents } = await takeRender(); - expect(renderedComponents).toStrictEqual([ReadQueryHook]); - expect(snapshot.result).toStrictEqualTyped({ + expect(renderedComponents).toStrictEqual(["useReadQuery"]); + expect(snapshot).toStrictEqualTyped({ data: { greeting: { __typename: "Greeting", @@ -198,7 +213,7 @@ test('does not suspend deferred queries with data in the cache and using a "cach }); } - await expect(renderStream).not.toRerender({ timeout: 50 }); + await 
expect(takeRender).not.toRerender(); }); test('does not suspend deferred queries with partial data in the cache and using a "cache-first" fetch policy with `returnPartialData`', async () => { @@ -252,32 +267,24 @@ test('does not suspend deferred queries with partial data in the cache and using incrementalHandler: new Defer20220824Handler(), }); - const renderStream = createDefaultProfiler>(); - const { SuspenseFallback, ReadQueryHook } = - createDefaultTrackedComponents(renderStream); - - function App() { - useTrackRenders(); - const [queryRef] = useBackgroundQuery(query, { - fetchPolicy: "cache-first", - returnPartialData: true, - }); - - return ( - }> - - - ); - } - using _disabledAct = disableActEnvironment(); - await renderStream.render(, { wrapper: createClientWrapper(client) }); + const { takeRender } = await renderSuspenseHook( + () => + useBackgroundQuery(query, { + fetchPolicy: "cache-first", + returnPartialData: true, + }), + { wrapper: createClientWrapper(client) } + ); { - const { snapshot, renderedComponents } = await renderStream.takeRender(); + const { snapshot, renderedComponents } = await takeRender(); - expect(renderedComponents).toStrictEqual([App, ReadQueryHook]); - expect(snapshot.result).toStrictEqualTyped({ + expect(renderedComponents).toStrictEqual([ + "useBackgroundQuery", + "useReadQuery", + ]); + expect(snapshot).toStrictEqualTyped({ data: { greeting: { __typename: "Greeting", @@ -298,10 +305,10 @@ test('does not suspend deferred queries with partial data in the cache and using }); { - const { snapshot, renderedComponents } = await renderStream.takeRender(); + const { snapshot, renderedComponents } = await takeRender(); - expect(renderedComponents).toStrictEqual([ReadQueryHook]); - expect(snapshot.result).toStrictEqualTyped({ + expect(renderedComponents).toStrictEqual(["useReadQuery"]); + expect(snapshot).toStrictEqualTyped({ data: { greeting: { __typename: "Greeting", @@ -329,10 +336,10 @@ test('does not suspend deferred queries with partial data in the cache and using }); { - const { snapshot, renderedComponents } = await renderStream.takeRender(); + const { snapshot, renderedComponents } = await takeRender(); - expect(renderedComponents).toStrictEqual([ReadQueryHook]); - expect(snapshot.result).toStrictEqualTyped({ + expect(renderedComponents).toStrictEqual(["useReadQuery"]); + expect(snapshot).toStrictEqualTyped({ data: { greeting: { __typename: "Greeting", @@ -346,6 +353,5 @@ test('does not suspend deferred queries with partial data in the cache and using }); } - await expect(renderStream).not.toRerender({ timeout: 50 }); + await expect(takeRender).not.toRerender(); }); - From 9ae7fa3b6913295297c2eb456d7816551cc104f9 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 23:01:15 -0600 Subject: [PATCH 062/254] Use createClientWrapper helper in the defer test files --- .../__tests__/useQuery/defer20220824.test.tsx | 35 +++------ .../useQuery/deferGraphQL17Alpha2.test.tsx | 35 +++------ .../useSuspenseQuery/defer20220824.test.tsx | 75 +++++-------------- .../deferGraphQL17Alpha9.test.tsx | 75 +++++-------------- 4 files changed, 60 insertions(+), 160 deletions(-) diff --git a/src/react/hooks/__tests__/useQuery/defer20220824.test.tsx b/src/react/hooks/__tests__/useQuery/defer20220824.test.tsx index 79cb0309731..a43a83dc428 100644 --- a/src/react/hooks/__tests__/useQuery/defer20220824.test.tsx +++ b/src/react/hooks/__tests__/useQuery/defer20220824.test.tsx @@ -12,8 +12,9 @@ import { NetworkStatus, } from "@apollo/client"; import { Defer20220824Handler 
} from "@apollo/client/incremental"; -import { ApolloProvider, useQuery } from "@apollo/client/react"; +import { useQuery } from "@apollo/client/react"; import { + createClientWrapper, markAsStreaming, mockDefer20220824, spyOnConsole, @@ -46,9 +47,7 @@ test("should handle deferred queries", async () => { const { takeSnapshot } = await renderHookToSnapshotStream( () => useQuery(query), { - wrapper: ({ children }) => ( - {children} - ), + wrapper: createClientWrapper(client), } ); @@ -154,9 +153,7 @@ test("should handle deferred queries in lists", async () => { const { takeSnapshot } = await renderHookToSnapshotStream( () => useQuery(query), { - wrapper: ({ children }) => ( - {children} - ), + wrapper: createClientWrapper(client), } ); @@ -312,9 +309,7 @@ test("should handle deferred queries in lists, merging arrays", async () => { const { takeSnapshot } = await renderHookToSnapshotStream( () => useQuery(query), { - wrapper: ({ children }) => ( - {children} - ), + wrapper: createClientWrapper(client), } ); @@ -480,9 +475,7 @@ test("should handle deferred queries with fetch policy no-cache", async () => { const { takeSnapshot } = await renderHookToSnapshotStream( () => useQuery(query, { fetchPolicy: "no-cache" }), { - wrapper: ({ children }) => ( - {children} - ), + wrapper: createClientWrapper(client), } ); @@ -590,9 +583,7 @@ test("should handle deferred queries with errors returned on the incremental bat const { takeSnapshot } = await renderHookToSnapshotStream( () => useQuery(query), { - wrapper: ({ children }) => ( - {children} - ), + wrapper: createClientWrapper(client), } ); @@ -752,9 +743,7 @@ it('should handle deferred queries with errors returned on the incremental batch const { takeSnapshot } = await renderHookToSnapshotStream( () => useQuery(query, { errorPolicy: "all" }), { - wrapper: ({ children }) => ( - {children} - ), + wrapper: createClientWrapper(client), } ); @@ -947,9 +936,7 @@ it('returns eventually consistent data from deferred queries with data in the ca const { takeSnapshot } = await renderHookToSnapshotStream( () => useQuery(query, { fetchPolicy: "cache-and-network" }), { - wrapper: ({ children }) => ( - {children} - ), + wrapper: createClientWrapper(client), } ); @@ -1079,9 +1066,7 @@ it('returns eventually consistent data from deferred queries with partial data i returnPartialData: true, }), { - wrapper: ({ children }) => ( - {children} - ), + wrapper: createClientWrapper(client), } ); diff --git a/src/react/hooks/__tests__/useQuery/deferGraphQL17Alpha2.test.tsx b/src/react/hooks/__tests__/useQuery/deferGraphQL17Alpha2.test.tsx index 218f774691a..60db1cde900 100644 --- a/src/react/hooks/__tests__/useQuery/deferGraphQL17Alpha2.test.tsx +++ b/src/react/hooks/__tests__/useQuery/deferGraphQL17Alpha2.test.tsx @@ -12,8 +12,9 @@ import { NetworkStatus, } from "@apollo/client"; import { GraphQL17Alpha9Handler } from "@apollo/client/incremental"; -import { ApolloProvider, useQuery } from "@apollo/client/react"; +import { useQuery } from "@apollo/client/react"; import { + createClientWrapper, markAsStreaming, mockDeferStreamGraphQL17Alpha9, spyOnConsole, @@ -46,9 +47,7 @@ test("should handle deferred queries", async () => { const { takeSnapshot } = await renderHookToSnapshotStream( () => useQuery(query), { - wrapper: ({ children }) => ( - {children} - ), + wrapper: createClientWrapper(client), } ); @@ -156,9 +155,7 @@ test("should handle deferred queries in lists", async () => { const { takeSnapshot } = await renderHookToSnapshotStream( () => useQuery(query), { - wrapper: ({ 
children }) => ( - {children} - ), + wrapper: createClientWrapper(client), } ); @@ -286,9 +283,7 @@ test("should handle deferred queries in lists, merging arrays", async () => { const { takeSnapshot } = await renderHookToSnapshotStream( () => useQuery(query), { - wrapper: ({ children }) => ( - {children} - ), + wrapper: createClientWrapper(client), } ); @@ -459,9 +454,7 @@ test("should handle deferred queries with fetch policy no-cache", async () => { const { takeSnapshot } = await renderHookToSnapshotStream( () => useQuery(query, { fetchPolicy: "no-cache" }), { - wrapper: ({ children }) => ( - {children} - ), + wrapper: createClientWrapper(client), } ); @@ -566,9 +559,7 @@ test("should handle deferred queries with errors returned on the incremental bat const { takeSnapshot } = await renderHookToSnapshotStream( () => useQuery(query), { - wrapper: ({ children }) => ( - {children} - ), + wrapper: createClientWrapper(client), } ); @@ -733,9 +724,7 @@ test('should handle deferred queries with errors returned on the incremental bat const { takeSnapshot } = await renderHookToSnapshotStream( () => useQuery(query, { errorPolicy: "all" }), { - wrapper: ({ children }) => ( - {children} - ), + wrapper: createClientWrapper(client), } ); @@ -933,9 +922,7 @@ test('returns eventually consistent data from deferred queries with data in the const { takeSnapshot } = await renderHookToSnapshotStream( () => useQuery(query, { fetchPolicy: "cache-and-network" }), { - wrapper: ({ children }) => ( - {children} - ), + wrapper: createClientWrapper(client), } ); @@ -1067,9 +1054,7 @@ test('returns eventually consistent data from deferred queries with partial data returnPartialData: true, }), { - wrapper: ({ children }) => ( - {children} - ), + wrapper: createClientWrapper(client), } ); diff --git a/src/react/hooks/__tests__/useSuspenseQuery/defer20220824.test.tsx b/src/react/hooks/__tests__/useSuspenseQuery/defer20220824.test.tsx index 59e373142ec..dcf8e4a32cb 100644 --- a/src/react/hooks/__tests__/useSuspenseQuery/defer20220824.test.tsx +++ b/src/react/hooks/__tests__/useSuspenseQuery/defer20220824.test.tsx @@ -18,8 +18,9 @@ import { NetworkStatus, } from "@apollo/client"; import { Defer20220824Handler } from "@apollo/client/incremental"; -import { ApolloProvider, useSuspenseQuery } from "@apollo/client/react"; +import { useSuspenseQuery } from "@apollo/client/react"; import { + createClientWrapper, markAsStreaming, mockDefer20220824, spyOnConsole, @@ -119,9 +120,7 @@ test("suspends deferred queries until initial chunk loads then streams in data a const { takeRender } = await renderSuspenseHook( () => useSuspenseQuery(query), { - wrapper: ({ children }) => ( - {children} - ), + wrapper: createClientWrapper(client), } ); @@ -218,9 +217,7 @@ test.each([ const { takeRender } = await renderSuspenseHook( () => useSuspenseQuery(query, { fetchPolicy }), { - wrapper: ({ children }) => ( - {children} - ), + wrapper: createClientWrapper(client), } ); @@ -323,9 +320,7 @@ test('does not suspend deferred queries with data in the cache and using a "cach const { takeRender } = await renderSuspenseHook( () => useSuspenseQuery(query, { fetchPolicy: "cache-first" }), { - wrapper: ({ children }) => ( - {children} - ), + wrapper: createClientWrapper(client), } ); @@ -395,9 +390,7 @@ test('does not suspend deferred queries with partial data in the cache and using returnPartialData: true, }), { - wrapper: ({ children }) => ( - {children} - ), + wrapper: createClientWrapper(client), } ); @@ -513,9 +506,7 @@ test('does not suspend deferred 
queries with data in the cache and using a "cach const { takeRender } = await renderSuspenseHook( () => useSuspenseQuery(query, { fetchPolicy: "cache-and-network" }), { - wrapper: ({ children }) => ( - {children} - ), + wrapper: createClientWrapper(client), } ); @@ -621,9 +612,7 @@ test("suspends deferred queries with lists and properly patches results", async const { takeRender } = await renderSuspenseHook( () => useSuspenseQuery(query), { - wrapper: ({ children }) => ( - {children} - ), + wrapper: createClientWrapper(client), } ); @@ -769,9 +758,7 @@ test("suspends queries with deferred fragments in lists and properly merges arra const { takeRender } = await renderSuspenseHook( () => useSuspenseQuery(query), { - wrapper: ({ children }) => ( - {children} - ), + wrapper: createClientWrapper(client), } ); @@ -923,9 +910,7 @@ test("incrementally rerenders data returned by a `refetch` for a deferred query" const { takeRender, getCurrentSnapshot } = await renderSuspenseHook( () => useSuspenseQuery(query), { - wrapper: ({ children }) => ( - {children} - ), + wrapper: createClientWrapper(client), } ); @@ -1104,9 +1089,7 @@ test("incrementally renders data returned after skipping a deferred query", asyn ({ skip }) => useSuspenseQuery(query, { skip }), { initialProps: { skip: true }, - wrapper: ({ children }) => ( - {children} - ), + wrapper: createClientWrapper(client), } ); @@ -1236,9 +1219,7 @@ test("rerenders data returned by `fetchMore` for a deferred query", async () => const { takeRender, getCurrentSnapshot } = await renderSuspenseHook( () => useSuspenseQuery(query, { variables: { offset: 0 } }), { - wrapper: ({ children }) => ( - {children} - ), + wrapper: createClientWrapper(client), } ); @@ -1460,9 +1441,7 @@ it.failing( const { takeRender, getCurrentSnapshot } = await renderSuspenseHook( () => useSuspenseQuery(query, { variables: { offset: 0 } }), { - wrapper: ({ children }) => ( - {children} - ), + wrapper: createClientWrapper(client), } ); @@ -1667,9 +1646,7 @@ test("throws network errors returned by deferred queries", async () => { const { takeRender } = await renderSuspenseHook( () => useSuspenseQuery(query), { - wrapper: ({ children }) => ( - {children} - ), + wrapper: createClientWrapper(client), } ); @@ -1719,9 +1696,7 @@ test("throws graphql errors returned by deferred queries", async () => { const { takeRender } = await renderSuspenseHook( () => useSuspenseQuery(query), { - wrapper: ({ children }) => ( - {children} - ), + wrapper: createClientWrapper(client), } ); @@ -1782,9 +1757,7 @@ test("throws errors returned by deferred queries that include partial data", asy const { takeRender } = await renderSuspenseHook( () => useSuspenseQuery(query), { - wrapper: ({ children }) => ( - {children} - ), + wrapper: createClientWrapper(client), } ); @@ -1840,9 +1813,7 @@ test("discards partial data and throws errors returned in incremental chunks", a const { takeRender } = await renderSuspenseHook( () => useSuspenseQuery(query), { - wrapper: ({ children }) => ( - {children} - ), + wrapper: createClientWrapper(client), } ); @@ -1990,9 +1961,7 @@ test("adds partial data and does not throw errors returned in incremental chunks const { takeRender } = await renderSuspenseHook( () => useSuspenseQuery(query, { errorPolicy: "all" }), { - wrapper: ({ children }) => ( - {children} - ), + wrapper: createClientWrapper(client), } ); @@ -2159,9 +2128,7 @@ test("adds partial data and discards errors returned in incremental chunks with const { takeRender } = await renderSuspenseHook( () => 
useSuspenseQuery(query, { errorPolicy: "ignore" }), { - wrapper: ({ children }) => ( - {children} - ), + wrapper: createClientWrapper(client), } ); @@ -2301,9 +2268,7 @@ test("can refetch and respond to cache updates after encountering an error in an const { takeRender, getCurrentSnapshot } = await renderSuspenseHook( () => useSuspenseQuery(query, { errorPolicy: "all" }), { - wrapper: ({ children }) => ( - {children} - ), + wrapper: createClientWrapper(client), } ); diff --git a/src/react/hooks/__tests__/useSuspenseQuery/deferGraphQL17Alpha9.test.tsx b/src/react/hooks/__tests__/useSuspenseQuery/deferGraphQL17Alpha9.test.tsx index 4ad4e3ceb3f..063aa94590c 100644 --- a/src/react/hooks/__tests__/useSuspenseQuery/deferGraphQL17Alpha9.test.tsx +++ b/src/react/hooks/__tests__/useSuspenseQuery/deferGraphQL17Alpha9.test.tsx @@ -18,8 +18,9 @@ import { NetworkStatus, } from "@apollo/client"; import { GraphQL17Alpha9Handler } from "@apollo/client/incremental"; -import { ApolloProvider, useSuspenseQuery } from "@apollo/client/react"; +import { useSuspenseQuery } from "@apollo/client/react"; import { + createClientWrapper, markAsStreaming, mockDeferStreamGraphQL17Alpha9, spyOnConsole, @@ -119,9 +120,7 @@ test("suspends deferred queries until initial chunk loads then streams in data a const { takeRender } = await renderSuspenseHook( () => useSuspenseQuery(query), { - wrapper: ({ children }) => ( - {children} - ), + wrapper: createClientWrapper(client), } ); @@ -220,9 +219,7 @@ test.each([ const { takeRender } = await renderSuspenseHook( () => useSuspenseQuery(query, { fetchPolicy }), { - wrapper: ({ children }) => ( - {children} - ), + wrapper: createClientWrapper(client), } ); @@ -327,9 +324,7 @@ test('does not suspend deferred queries with data in the cache and using a "cach const { takeRender } = await renderSuspenseHook( () => useSuspenseQuery(query, { fetchPolicy: "cache-first" }), { - wrapper: ({ children }) => ( - {children} - ), + wrapper: createClientWrapper(client), } ); @@ -399,9 +394,7 @@ test('does not suspend deferred queries with partial data in the cache and using returnPartialData: true, }), { - wrapper: ({ children }) => ( - {children} - ), + wrapper: createClientWrapper(client), } ); @@ -519,9 +512,7 @@ test('does not suspend deferred queries with data in the cache and using a "cach const { takeRender } = await renderSuspenseHook( () => useSuspenseQuery(query, { fetchPolicy: "cache-and-network" }), { - wrapper: ({ children }) => ( - {children} - ), + wrapper: createClientWrapper(client), } ); @@ -629,9 +620,7 @@ test("suspends deferred queries with lists and properly patches results", async const { takeRender } = await renderSuspenseHook( () => useSuspenseQuery(query), { - wrapper: ({ children }) => ( - {children} - ), + wrapper: createClientWrapper(client), } ); @@ -783,9 +772,7 @@ test("suspends queries with deferred fragments in lists and properly merges arra const { takeRender } = await renderSuspenseHook( () => useSuspenseQuery(query), { - wrapper: ({ children }) => ( - {children} - ), + wrapper: createClientWrapper(client), } ); @@ -942,9 +929,7 @@ test("incrementally rerenders data returned by a `refetch` for a deferred query" const { takeRender, getCurrentSnapshot } = await renderSuspenseHook( () => useSuspenseQuery(query), { - wrapper: ({ children }) => ( - {children} - ), + wrapper: createClientWrapper(client), } ); @@ -1127,9 +1112,7 @@ test("incrementally renders data returned after skipping a deferred query", asyn ({ skip }) => useSuspenseQuery(query, { skip }), { 
initialProps: { skip: true }, - wrapper: ({ children }) => ( - {children} - ), + wrapper: createClientWrapper(client), } ); @@ -1261,9 +1244,7 @@ test("rerenders data returned by `fetchMore` for a deferred query", async () => const { takeRender, getCurrentSnapshot } = await renderSuspenseHook( () => useSuspenseQuery(query, { variables: { offset: 0 } }), { - wrapper: ({ children }) => ( - {children} - ), + wrapper: createClientWrapper(client), } ); @@ -1489,9 +1470,7 @@ it.failing( const { takeRender, getCurrentSnapshot } = await renderSuspenseHook( () => useSuspenseQuery(query, { variables: { offset: 0 } }), { - wrapper: ({ children }) => ( - {children} - ), + wrapper: createClientWrapper(client), } ); @@ -1700,9 +1679,7 @@ test("throws network errors returned by deferred queries", async () => { const { takeRender } = await renderSuspenseHook( () => useSuspenseQuery(query), { - wrapper: ({ children }) => ( - {children} - ), + wrapper: createClientWrapper(client), } ); @@ -1752,9 +1729,7 @@ test("throws graphql errors returned by deferred queries", async () => { const { takeRender } = await renderSuspenseHook( () => useSuspenseQuery(query), { - wrapper: ({ children }) => ( - {children} - ), + wrapper: createClientWrapper(client), } ); @@ -1815,9 +1790,7 @@ test("throws errors returned by deferred queries that include partial data", asy const { takeRender } = await renderSuspenseHook( () => useSuspenseQuery(query), { - wrapper: ({ children }) => ( - {children} - ), + wrapper: createClientWrapper(client), } ); @@ -1873,9 +1846,7 @@ test("discards partial data and throws errors returned in incremental chunks", a const { takeRender } = await renderSuspenseHook( () => useSuspenseQuery(query), { - wrapper: ({ children }) => ( - {children} - ), + wrapper: createClientWrapper(client), } ); @@ -2028,9 +1999,7 @@ test("adds partial data and does not throw errors returned in incremental chunks const { takeRender } = await renderSuspenseHook( () => useSuspenseQuery(query, { errorPolicy: "all" }), { - wrapper: ({ children }) => ( - {children} - ), + wrapper: createClientWrapper(client), } ); @@ -2202,9 +2171,7 @@ test("adds partial data and discards errors returned in incremental chunks with const { takeRender } = await renderSuspenseHook( () => useSuspenseQuery(query, { errorPolicy: "ignore" }), { - wrapper: ({ children }) => ( - {children} - ), + wrapper: createClientWrapper(client), } ); @@ -2349,9 +2316,7 @@ test("can refetch and respond to cache updates after encountering an error in an const { takeRender, getCurrentSnapshot } = await renderSuspenseHook( () => useSuspenseQuery(query, { errorPolicy: "all" }), { - wrapper: ({ children }) => ( - {children} - ), + wrapper: createClientWrapper(client), } ); From 27e5faf62144f76493f885748869d8d20db3f5b4 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 23:07:44 -0600 Subject: [PATCH 063/254] Add useBackgroundQuery tests for defer with updated spec --- .../deferGraphQL17Alpha9.test.tsx | 361 ++++++++++++++++++ 1 file changed, 361 insertions(+) create mode 100644 src/react/hooks/__tests__/useBackgroundQuery/deferGraphQL17Alpha9.test.tsx diff --git a/src/react/hooks/__tests__/useBackgroundQuery/deferGraphQL17Alpha9.test.tsx b/src/react/hooks/__tests__/useBackgroundQuery/deferGraphQL17Alpha9.test.tsx new file mode 100644 index 00000000000..1efc081c759 --- /dev/null +++ b/src/react/hooks/__tests__/useBackgroundQuery/deferGraphQL17Alpha9.test.tsx @@ -0,0 +1,361 @@ +import type { RenderOptions } from "@testing-library/react"; +import { + 
createRenderStream, + disableActEnvironment, + useTrackRenders, +} from "@testing-library/react-render-stream"; +import React, { Suspense } from "react"; +import { ErrorBoundary } from "react-error-boundary"; + +import type { + DataState, + ErrorLike, + OperationVariables, + TypedDocumentNode, +} from "@apollo/client"; +import { ApolloClient, gql, NetworkStatus } from "@apollo/client"; +import { InMemoryCache } from "@apollo/client/cache"; +import { GraphQL17Alpha9Handler } from "@apollo/client/incremental"; +import type { QueryRef } from "@apollo/client/react"; +import { useBackgroundQuery, useReadQuery } from "@apollo/client/react"; +import { + createClientWrapper, + mockDeferStreamGraphQL17Alpha9, + spyOnConsole, +} from "@apollo/client/testing/internal"; + +async function renderSuspenseHook< + TData, + TVariables extends OperationVariables, + TQueryRef extends QueryRef, + TStates extends DataState["dataState"] = TQueryRef extends ( + QueryRef + ) ? + States + : never, + Props = never, +>( + renderHook: ( + props: Props extends never ? undefined : Props + ) => [TQueryRef, useBackgroundQuery.Result], + options: Pick & { initialProps?: Props } +) { + function UseReadQuery({ queryRef }: { queryRef: QueryRef }) { + useTrackRenders({ name: "useReadQuery" }); + replaceSnapshot(useReadQuery(queryRef) as any); + + return null; + } + + function SuspenseFallback() { + useTrackRenders({ name: "SuspenseFallback" }); + + return null; + } + + function ErrorFallback() { + useTrackRenders({ name: "ErrorBoundary" }); + + return null; + } + + function App({ props }: { props: Props | undefined }) { + useTrackRenders({ name: "useBackgroundQuery" }); + const [queryRef] = renderHook(props as any); + + return ( + }> + replaceSnapshot({ error })} + > + + + + ); + } + + const { render, takeRender, replaceSnapshot } = createRenderStream< + useReadQuery.Result | { error: ErrorLike } + >(); + + const utils = await render(, options); + + function rerender(props: Props) { + return utils.rerender(); + } + + return { takeRender, rerender }; +} + +test('does not suspend deferred queries with data in the cache and using a "cache-and-network" fetch policy', async () => { + interface Data { + greeting: { + __typename: string; + message: string; + recipient: { name: string; __typename: string }; + }; + } + + const query: TypedDocumentNode = gql` + query { + greeting { + message + ... 
on Greeting @defer { + recipient { + name + } + } + } + } + `; + + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDeferStreamGraphQL17Alpha9(); + + const cache = new InMemoryCache(); + cache.writeQuery({ + query, + data: { + greeting: { + __typename: "Greeting", + message: "Hello cached", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + }); + const client = new ApolloClient({ + cache, + link: httpLink, + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + () => useBackgroundQuery(query, { fetchPolicy: "cache-and-network" }), + { wrapper: createClientWrapper(client) } + ); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual([ + "useBackgroundQuery", + "useReadQuery", + ]); + expect(snapshot).toStrictEqualTyped({ + data: { + greeting: { + __typename: "Greeting", + message: "Hello cached", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + dataState: "complete", + error: undefined, + networkStatus: NetworkStatus.loading, + }); + } + + enqueueInitialChunk({ + data: { + greeting: { __typename: "Greeting", message: "Hello world" }, + }, + pending: [{ id: "0", path: ["greeting"] }], + hasNext: true, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useReadQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + dataState: "streaming", + error: undefined, + networkStatus: NetworkStatus.streaming, + }); + } + + enqueueSubsequentChunk({ + incremental: [ + { + data: { + recipient: { name: "Alice", __typename: "Person" }, + __typename: "Greeting", + }, + id: "0", + }, + ], + completed: [{ id: "0" }], + hasNext: false, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useReadQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Alice" }, + }, + }, + dataState: "complete", + error: undefined, + networkStatus: NetworkStatus.ready, + }); + } + + await expect(takeRender).not.toRerender(); +}); + +test('does not suspend deferred queries with partial data in the cache and using a "cache-first" fetch policy with `returnPartialData`', async () => { + interface QueryData { + greeting: { + __typename: string; + message?: string; + recipient?: { + __typename: string; + name: string; + }; + }; + } + + const query: TypedDocumentNode = gql` + query { + greeting { + message + ... on Greeting @defer { + recipient { + name + } + } + } + } + `; + + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDeferStreamGraphQL17Alpha9(); + + const cache = new InMemoryCache(); + + // We are intentionally writing partial data to the cache. Supress console + // warnings to avoid unnecessary noise in the test. 
+ { + using _consoleSpy = spyOnConsole("error"); + cache.writeQuery({ + query, + data: { + greeting: { + __typename: "Greeting", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + }); + } + + const client = new ApolloClient({ + link: httpLink, + cache, + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + () => + useBackgroundQuery(query, { + fetchPolicy: "cache-first", + returnPartialData: true, + }), + { wrapper: createClientWrapper(client) } + ); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual([ + "useBackgroundQuery", + "useReadQuery", + ]); + expect(snapshot).toStrictEqualTyped({ + data: { + greeting: { + __typename: "Greeting", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + dataState: "partial", + error: undefined, + networkStatus: NetworkStatus.loading, + }); + } + + enqueueInitialChunk({ + data: { + greeting: { message: "Hello world", __typename: "Greeting" }, + }, + pending: [{ id: "0", path: ["greeting"] }], + hasNext: true, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useReadQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + dataState: "streaming", + error: undefined, + networkStatus: NetworkStatus.streaming, + }); + } + + enqueueSubsequentChunk({ + incremental: [ + { + data: { + __typename: "Greeting", + recipient: { name: "Alice", __typename: "Person" }, + }, + id: "0", + }, + ], + completed: [{ id: "0" }], + hasNext: false, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useReadQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Alice" }, + }, + }, + dataState: "complete", + error: undefined, + networkStatus: NetworkStatus.ready, + }); + } + + await expect(takeRender).not.toRerender(); +}); From 7f7f72c4da540af52f9c138cdfbf2c7874694138 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 23:20:21 -0600 Subject: [PATCH 064/254] Move useMutation defer tests to own file --- .../hooks/__tests__/useMutation.test.tsx | 378 +--------------- .../useMutation/defer20220824.test.tsx | 412 ++++++++++++++++++ 2 files changed, 413 insertions(+), 377 deletions(-) create mode 100644 src/react/hooks/__tests__/useMutation/defer20220824.test.tsx diff --git a/src/react/hooks/__tests__/useMutation.test.tsx b/src/react/hooks/__tests__/useMutation.test.tsx index f4e01ba122a..3f925c5c510 100644 --- a/src/react/hooks/__tests__/useMutation.test.tsx +++ b/src/react/hooks/__tests__/useMutation.test.tsx @@ -26,10 +26,9 @@ import { NetworkStatus, } from "@apollo/client"; import { InMemoryCache } from "@apollo/client/cache"; -import { Defer20220824Handler } from "@apollo/client/incremental"; import { BatchHttpLink } from "@apollo/client/link/batch-http"; import { ApolloProvider, useMutation, useQuery } from "@apollo/client/react"; -import { MockLink, MockSubscriptionLink } from "@apollo/client/testing"; +import { MockLink } from "@apollo/client/testing"; import { spyOnConsole, wait } from "@apollo/client/testing/internal"; import { MockedProvider } from 
"@apollo/client/testing/react"; import type { DeepPartial } from "@apollo/client/utilities"; @@ -3922,381 +3921,6 @@ describe("useMutation Hook", () => { await waitFor(() => screen.findByText("item 3")); }); }); - describe("defer", () => { - const CREATE_TODO_MUTATION_DEFER = gql` - mutation createTodo($description: String!, $priority: String) { - createTodo(description: $description, priority: $priority) { - id - ... @defer { - description - priority - } - } - } - `; - const variables = { - description: "Get milk!", - }; - it("resolves a deferred mutation with the full result", async () => { - using consoleSpies = spyOnConsole("error"); - const link = new MockSubscriptionLink(); - - const client = new ApolloClient({ - link, - cache: new InMemoryCache(), - incrementalHandler: new Defer20220824Handler(), - }); - - using _disabledAct = disableActEnvironment(); - const { takeSnapshot, getCurrentSnapshot } = - await renderHookToSnapshotStream( - () => useMutation(CREATE_TODO_MUTATION_DEFER), - { - wrapper: ({ children }) => ( - {children} - ), - } - ); - - { - const [, mutation] = await takeSnapshot(); - - expect(mutation).toStrictEqualTyped({ - data: undefined, - error: undefined, - loading: false, - called: false, - }); - } - - const [mutate] = getCurrentSnapshot(); - - const promise = mutate({ variables }); - - { - const [, mutation] = await takeSnapshot(); - - expect(mutation).toStrictEqualTyped({ - data: undefined, - error: undefined, - loading: true, - called: true, - }); - } - - setTimeout(() => { - link.simulateResult({ - result: { - data: { - createTodo: { - id: 1, - __typename: "Todo", - }, - }, - hasNext: true, - }, - }); - }); - - await expect(takeSnapshot).not.toRerender(); - - setTimeout(() => { - link.simulateResult( - { - result: { - incremental: [ - { - data: { - description: "Get milk!", - priority: "High", - __typename: "Todo", - }, - path: ["createTodo"], - }, - ], - hasNext: false, - }, - }, - true - ); - }); - - { - const [, mutation] = await takeSnapshot(); - - expect(mutation).toStrictEqualTyped({ - data: { - createTodo: { - id: 1, - description: "Get milk!", - priority: "High", - __typename: "Todo", - }, - }, - error: undefined, - loading: false, - called: true, - }); - } - - await expect(promise).resolves.toStrictEqualTyped({ - data: { - createTodo: { - id: 1, - description: "Get milk!", - priority: "High", - __typename: "Todo", - }, - }, - }); - - expect(consoleSpies.error).not.toHaveBeenCalled(); - }); - - it("resolves with resulting errors and calls onError callback", async () => { - using consoleSpies = spyOnConsole("error"); - const link = new MockSubscriptionLink(); - - const client = new ApolloClient({ - link, - cache: new InMemoryCache(), - incrementalHandler: new Defer20220824Handler(), - }); - - const onError = jest.fn(); - using _disabledAct = disableActEnvironment(); - const { takeSnapshot, getCurrentSnapshot } = - await renderHookToSnapshotStream( - () => useMutation(CREATE_TODO_MUTATION_DEFER, { onError }), - { - wrapper: ({ children }) => ( - {children} - ), - } - ); - - { - const [, result] = await takeSnapshot(); - - expect(result).toStrictEqualTyped({ - data: undefined, - error: undefined, - loading: false, - called: false, - }); - } - - const [createTodo] = getCurrentSnapshot(); - - const promise = createTodo({ variables }); - - { - const [, result] = await takeSnapshot(); - - expect(result).toStrictEqualTyped({ - data: undefined, - error: undefined, - loading: true, - called: true, - }); - } - - link.simulateResult({ - result: { - data: { - 
createTodo: { - id: 1, - __typename: "Todo", - }, - }, - hasNext: true, - }, - }); - - await expect(takeSnapshot).not.toRerender(); - - link.simulateResult( - { - result: { - incremental: [ - { - data: null, - errors: [{ message: CREATE_TODO_ERROR }], - path: ["createTodo"], - }, - ], - hasNext: false, - }, - }, - true - ); - - await expect(promise).rejects.toThrow( - new CombinedGraphQLErrors({ errors: [{ message: CREATE_TODO_ERROR }] }) - ); - - { - const [, result] = await takeSnapshot(); - - expect(result).toStrictEqualTyped({ - data: undefined, - error: new CombinedGraphQLErrors({ - data: { createTodo: { __typename: "Todo", id: 1 } }, - errors: [{ message: CREATE_TODO_ERROR }], - }), - loading: false, - called: true, - }); - } - - await expect(takeSnapshot).not.toRerender(); - - expect(onError).toHaveBeenCalledTimes(1); - expect(onError).toHaveBeenLastCalledWith( - new CombinedGraphQLErrors({ - data: { createTodo: { __typename: "Todo", id: 1 } }, - errors: [{ message: CREATE_TODO_ERROR }], - }), - expect.anything() - ); - expect(consoleSpies.error).not.toHaveBeenCalled(); - }); - - it("calls the update function with the final merged result data", async () => { - using consoleSpies = spyOnConsole("error"); - const link = new MockSubscriptionLink(); - const update = jest.fn(); - const client = new ApolloClient({ - link, - cache: new InMemoryCache(), - incrementalHandler: new Defer20220824Handler(), - }); - - using _disabledAct = disableActEnvironment(); - const { takeSnapshot, getCurrentSnapshot } = - await renderHookToSnapshotStream( - () => useMutation(CREATE_TODO_MUTATION_DEFER, { update }), - { - wrapper: ({ children }) => ( - {children} - ), - } - ); - - { - const [, result] = await takeSnapshot(); - - expect(result).toStrictEqualTyped({ - data: undefined, - error: undefined, - loading: false, - called: false, - }); - } - - const [createTodo] = getCurrentSnapshot(); - - const promiseReturnedByMutate = createTodo({ variables }); - - { - const [, result] = await takeSnapshot(); - - expect(result).toStrictEqualTyped({ - data: undefined, - error: undefined, - loading: true, - called: true, - }); - } - - link.simulateResult({ - result: { - data: { - createTodo: { - id: 1, - __typename: "Todo", - }, - }, - hasNext: true, - }, - }); - - await expect(takeSnapshot).not.toRerender(); - - link.simulateResult( - { - result: { - incremental: [ - { - data: { - description: "Get milk!", - priority: "High", - __typename: "Todo", - }, - path: ["createTodo"], - }, - ], - hasNext: false, - }, - }, - true - ); - - await expect(promiseReturnedByMutate).resolves.toStrictEqualTyped({ - data: { - createTodo: { - id: 1, - description: "Get milk!", - priority: "High", - __typename: "Todo", - }, - }, - }); - - { - const [, result] = await takeSnapshot(); - - expect(result).toStrictEqualTyped({ - data: { - createTodo: { - id: 1, - description: "Get milk!", - priority: "High", - __typename: "Todo", - }, - }, - error: undefined, - loading: false, - called: true, - }); - } - - await expect(takeSnapshot).not.toRerender(); - - expect(update).toHaveBeenCalledTimes(1); - expect(update).toHaveBeenCalledWith( - // the first item is the cache, which we don't need to make any - // assertions against in this test - expect.anything(), - // second argument is the result - expect.objectContaining({ - data: { - createTodo: { - id: 1, - description: "Get milk!", - priority: "High", - __typename: "Todo", - }, - }, - }), - // third argument is an object containing context and variables - // but we only care about variables here 
- expect.objectContaining({ variables }) - ); - - expect(consoleSpies.error).not.toHaveBeenCalled(); - }); - }); }); describe("data masking", () => { diff --git a/src/react/hooks/__tests__/useMutation/defer20220824.test.tsx b/src/react/hooks/__tests__/useMutation/defer20220824.test.tsx new file mode 100644 index 00000000000..45b046eefd6 --- /dev/null +++ b/src/react/hooks/__tests__/useMutation/defer20220824.test.tsx @@ -0,0 +1,412 @@ +import { + disableActEnvironment, + renderHookToSnapshotStream, +} from "@testing-library/react-render-stream"; +import { gql } from "graphql-tag"; + +import { ApolloClient, CombinedGraphQLErrors } from "@apollo/client"; +import { InMemoryCache } from "@apollo/client/cache"; +import { Defer20220824Handler } from "@apollo/client/incremental"; +import { useMutation } from "@apollo/client/react"; +import { MockSubscriptionLink } from "@apollo/client/testing"; +import { + createClientWrapper, + spyOnConsole, +} from "@apollo/client/testing/internal"; + +const CREATE_TODO_ERROR = "Failed to create item"; + +test("resolves a deferred mutation with the full result", async () => { + using _ = spyOnConsole("error"); + const mutation = gql` + mutation createTodo($description: String!, $priority: String) { + createTodo(description: $description, priority: $priority) { + id + ... @defer { + description + priority + } + } + } + `; + const variables = { + description: "Get milk!", + }; + + const link = new MockSubscriptionLink(); + + const client = new ApolloClient({ + link, + cache: new InMemoryCache(), + incrementalHandler: new Defer20220824Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot, getCurrentSnapshot } = await renderHookToSnapshotStream( + () => useMutation(mutation), + { wrapper: createClientWrapper(client) } + ); + + { + const [, mutation] = await takeSnapshot(); + + expect(mutation).toStrictEqualTyped({ + data: undefined, + error: undefined, + loading: false, + called: false, + }); + } + + const [mutate] = getCurrentSnapshot(); + + const promise = mutate({ variables }); + + { + const [, mutation] = await takeSnapshot(); + + expect(mutation).toStrictEqualTyped({ + data: undefined, + error: undefined, + loading: true, + called: true, + }); + } + + setTimeout(() => { + link.simulateResult({ + result: { + data: { + createTodo: { + id: 1, + __typename: "Todo", + }, + }, + hasNext: true, + }, + }); + }); + + await expect(takeSnapshot).not.toRerender(); + + setTimeout(() => { + link.simulateResult( + { + result: { + incremental: [ + { + data: { + description: "Get milk!", + priority: "High", + __typename: "Todo", + }, + path: ["createTodo"], + }, + ], + hasNext: false, + }, + }, + true + ); + }); + + { + const [, mutation] = await takeSnapshot(); + + expect(mutation).toStrictEqualTyped({ + data: { + createTodo: { + id: 1, + description: "Get milk!", + priority: "High", + __typename: "Todo", + }, + }, + error: undefined, + loading: false, + called: true, + }); + } + + await expect(promise).resolves.toStrictEqualTyped({ + data: { + createTodo: { + id: 1, + description: "Get milk!", + priority: "High", + __typename: "Todo", + }, + }, + }); + + expect(console.error).not.toHaveBeenCalled(); +}); + +test("resolves with resulting errors and calls onError callback", async () => { + using _ = spyOnConsole("error"); + const mutation = gql` + mutation createTodo($description: String!, $priority: String) { + createTodo(description: $description, priority: $priority) { + id + ... 
@defer { + description + priority + } + } + } + `; + const variables = { + description: "Get milk!", + }; + + const link = new MockSubscriptionLink(); + + const client = new ApolloClient({ + link, + cache: new InMemoryCache(), + incrementalHandler: new Defer20220824Handler(), + }); + + const onError = jest.fn(); + using _disabledAct = disableActEnvironment(); + const { takeSnapshot, getCurrentSnapshot } = await renderHookToSnapshotStream( + () => useMutation(mutation, { onError }), + { + wrapper: createClientWrapper(client), + } + ); + + { + const [, result] = await takeSnapshot(); + + expect(result).toStrictEqualTyped({ + data: undefined, + error: undefined, + loading: false, + called: false, + }); + } + + const [createTodo] = getCurrentSnapshot(); + + const promise = createTodo({ variables }); + + { + const [, result] = await takeSnapshot(); + + expect(result).toStrictEqualTyped({ + data: undefined, + error: undefined, + loading: true, + called: true, + }); + } + + link.simulateResult({ + result: { + data: { + createTodo: { + id: 1, + __typename: "Todo", + }, + }, + hasNext: true, + }, + }); + + await expect(takeSnapshot).not.toRerender(); + + link.simulateResult( + { + result: { + incremental: [ + { + data: null, + errors: [{ message: CREATE_TODO_ERROR }], + path: ["createTodo"], + }, + ], + hasNext: false, + }, + }, + true + ); + + await expect(promise).rejects.toThrow( + new CombinedGraphQLErrors({ errors: [{ message: CREATE_TODO_ERROR }] }) + ); + + { + const [, result] = await takeSnapshot(); + + expect(result).toStrictEqualTyped({ + data: undefined, + error: new CombinedGraphQLErrors({ + data: { createTodo: { __typename: "Todo", id: 1 } }, + errors: [{ message: CREATE_TODO_ERROR }], + }), + loading: false, + called: true, + }); + } + + await expect(takeSnapshot).not.toRerender(); + + expect(onError).toHaveBeenCalledTimes(1); + expect(onError).toHaveBeenLastCalledWith( + new CombinedGraphQLErrors({ + data: { createTodo: { __typename: "Todo", id: 1 } }, + errors: [{ message: CREATE_TODO_ERROR }], + }), + expect.anything() + ); + expect(console.error).not.toHaveBeenCalled(); +}); + +test("calls the update function with the final merged result data", async () => { + using _ = spyOnConsole("error"); + const mutation = gql` + mutation createTodo($description: String!, $priority: String) { + createTodo(description: $description, priority: $priority) { + id + ... 
@defer { + description + priority + } + } + } + `; + const variables = { + description: "Get milk!", + }; + + const link = new MockSubscriptionLink(); + const update = jest.fn(); + const client = new ApolloClient({ + link, + cache: new InMemoryCache(), + incrementalHandler: new Defer20220824Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot, getCurrentSnapshot } = await renderHookToSnapshotStream( + () => useMutation(mutation, { update }), + { + wrapper: createClientWrapper(client), + } + ); + + { + const [, result] = await takeSnapshot(); + + expect(result).toStrictEqualTyped({ + data: undefined, + error: undefined, + loading: false, + called: false, + }); + } + + const [createTodo] = getCurrentSnapshot(); + + const promiseReturnedByMutate = createTodo({ variables }); + + { + const [, result] = await takeSnapshot(); + + expect(result).toStrictEqualTyped({ + data: undefined, + error: undefined, + loading: true, + called: true, + }); + } + + link.simulateResult({ + result: { + data: { + createTodo: { + id: 1, + __typename: "Todo", + }, + }, + hasNext: true, + }, + }); + + await expect(takeSnapshot).not.toRerender(); + + link.simulateResult( + { + result: { + incremental: [ + { + data: { + description: "Get milk!", + priority: "High", + __typename: "Todo", + }, + path: ["createTodo"], + }, + ], + hasNext: false, + }, + }, + true + ); + + await expect(promiseReturnedByMutate).resolves.toStrictEqualTyped({ + data: { + createTodo: { + id: 1, + description: "Get milk!", + priority: "High", + __typename: "Todo", + }, + }, + }); + + { + const [, result] = await takeSnapshot(); + + expect(result).toStrictEqualTyped({ + data: { + createTodo: { + id: 1, + description: "Get milk!", + priority: "High", + __typename: "Todo", + }, + }, + error: undefined, + loading: false, + called: true, + }); + } + + await expect(takeSnapshot).not.toRerender(); + + expect(update).toHaveBeenCalledTimes(1); + expect(update).toHaveBeenCalledWith( + // the first item is the cache, which we don't need to make any + // assertions against in this test + expect.anything(), + // second argument is the result + expect.objectContaining({ + data: { + createTodo: { + id: 1, + description: "Get milk!", + priority: "High", + __typename: "Todo", + }, + }, + }), + // third argument is an object containing context and variables + // but we only care about variables here + expect.objectContaining({ variables }) + ); + + expect(console.error).not.toHaveBeenCalled(); +}); + From 013c56883b0aa2c73960ba7d142d9229406695b0 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 23:29:45 -0600 Subject: [PATCH 065/254] Move useLoadableQuery defer tests to own file --- .../hooks/__tests__/useLoadableQuery.test.tsx | 324 ------------- .../useLoadableQuery/defer20220824.test.tsx | 436 ++++++++++++++++++ 2 files changed, 436 insertions(+), 324 deletions(-) create mode 100644 src/react/hooks/__tests__/useLoadableQuery/defer20220824.test.tsx diff --git a/src/react/hooks/__tests__/useLoadableQuery.test.tsx b/src/react/hooks/__tests__/useLoadableQuery.test.tsx index 53818cc4eda..1a49e9dca1c 100644 --- a/src/react/hooks/__tests__/useLoadableQuery.test.tsx +++ b/src/react/hooks/__tests__/useLoadableQuery.test.tsx @@ -1531,163 +1531,6 @@ it("works with startTransition to change variables", async () => { }); }); -it('does not suspend deferred queries with data in the cache and using a "cache-and-network" fetch policy', async () => { - interface Data { - greeting: { - __typename: string; - message: string; - 
recipient: { name: string; __typename: string }; - }; - } - - const query: TypedDocumentNode> = gql` - query { - greeting { - message - ... @defer { - recipient { - name - } - } - } - } - `; - - const link = new MockSubscriptionLink(); - const cache = new InMemoryCache(); - cache.writeQuery({ - query, - data: { - greeting: { - __typename: "Greeting", - message: "Hello cached", - recipient: { __typename: "Person", name: "Cached Alice" }, - }, - }, - }); - const client = new ApolloClient({ - cache, - link, - incrementalHandler: new Defer20220824Handler(), - }); - - using _disabledAct = disableActEnvironment(); - const renderStream = createDefaultProfiler(); - const { SuspenseFallback, ReadQueryHook } = - createDefaultProfiledComponents(renderStream); - - function App() { - useTrackRenders(); - const [loadQuery, queryRef] = useLoadableQuery(query, { - fetchPolicy: "cache-and-network", - }); - return ( -
<div> - <button onClick={() => loadQuery()}>Load todo</button> - <Suspense fallback={<SuspenseFallback />}> - {queryRef && <ReadQueryHook queryRef={queryRef} />} - </Suspense> - </div>
- ); - } - - const { user } = await renderWithClient( - , - { - client, - }, - renderStream - ); - - // initial render - await renderStream.takeRender(); - - await user.click(screen.getByText("Load todo")); - - { - const { snapshot, renderedComponents } = await renderStream.takeRender(); - - expect(renderedComponents).toStrictEqual([App, ReadQueryHook]); - - expect(snapshot.result).toStrictEqualTyped({ - data: { - greeting: { - __typename: "Greeting", - message: "Hello cached", - recipient: { __typename: "Person", name: "Cached Alice" }, - }, - }, - dataState: "complete", - error: undefined, - networkStatus: NetworkStatus.loading, - }); - } - - link.simulateResult({ - result: { - data: { - greeting: { __typename: "Greeting", message: "Hello world" }, - }, - hasNext: true, - }, - }); - - { - const { snapshot, renderedComponents } = await renderStream.takeRender(); - - expect(renderedComponents).toStrictEqual([ReadQueryHook]); - expect(snapshot.result).toStrictEqualTyped({ - data: { - greeting: { - __typename: "Greeting", - message: "Hello world", - recipient: { __typename: "Person", name: "Cached Alice" }, - }, - }, - dataState: "streaming", - error: undefined, - networkStatus: NetworkStatus.streaming, - }); - } - - link.simulateResult( - { - result: { - incremental: [ - { - data: { - recipient: { name: "Alice", __typename: "Person" }, - __typename: "Greeting", - }, - path: ["greeting"], - }, - ], - hasNext: false, - }, - }, - true - ); - - { - const { snapshot, renderedComponents } = await renderStream.takeRender(); - - expect(renderedComponents).toStrictEqual([ReadQueryHook]); - expect(snapshot.result).toStrictEqualTyped({ - data: { - greeting: { - __typename: "Greeting", - message: "Hello world", - recipient: { __typename: "Person", name: "Alice" }, - }, - }, - dataState: "complete", - error: undefined, - networkStatus: NetworkStatus.ready, - }); - } - - await expect(renderStream).not.toRerender(); -}); it("reacts to cache updates", async () => { const { query, mocks } = useSimpleQueryCase(); @@ -4553,173 +4396,6 @@ it('suspends and does not use partial data when changing variables and using a " } }); -it('does not suspend deferred queries with partial data in the cache and using a "cache-first" fetch policy with `returnPartialData`', async () => { - interface QueryData { - greeting: { - __typename: string; - message?: string; - recipient?: { - __typename: string; - name: string; - }; - }; - } - - const query: TypedDocumentNode> = gql` - query { - greeting { - message - ... on Greeting @defer { - recipient { - name - } - } - } - } - `; - - const link = new MockSubscriptionLink(); - const cache = new InMemoryCache(); - - { - // We are intentionally writing partial data to the cache. Supress console - // warnings to avoid unnecessary noise in the test. - using _consoleSpy = spyOnConsole("error"); - - cache.writeQuery({ - query, - data: { - greeting: { - __typename: "Greeting", - recipient: { __typename: "Person", name: "Cached Alice" }, - }, - }, - }); - } - - const client = new ApolloClient({ - link, - cache, - incrementalHandler: new Defer20220824Handler(), - }); - - using _disabledAct = disableActEnvironment(); - const renderStream = createDefaultProfiler>(); - const { SuspenseFallback, ReadQueryHook } = - createDefaultProfiledComponents(renderStream); - - function App() { - useTrackRenders(); - const [loadTodo, queryRef] = useLoadableQuery(query, { - fetchPolicy: "cache-first", - returnPartialData: true, - }); - - return ( -
<div> - <button onClick={() => loadTodo()}>Load todo</button> - <Suspense fallback={<SuspenseFallback />}> - {queryRef && <ReadQueryHook queryRef={queryRef} />} - </Suspense> - </div>
- ); - } - - const { user } = await renderWithClient( - , - { - client, - }, - renderStream - ); - - // initial render - await renderStream.takeRender(); - - await user.click(screen.getByText("Load todo")); - - { - const { snapshot, renderedComponents } = await renderStream.takeRender(); - - expect(renderedComponents).toStrictEqual([App, ReadQueryHook]); - expect(snapshot.result).toStrictEqualTyped({ - data: { - greeting: { - __typename: "Greeting", - recipient: { __typename: "Person", name: "Cached Alice" }, - }, - }, - dataState: "partial", - error: undefined, - networkStatus: NetworkStatus.loading, - }); - } - - link.simulateResult({ - result: { - data: { - greeting: { message: "Hello world", __typename: "Greeting" }, - }, - hasNext: true, - }, - }); - - { - const { snapshot, renderedComponents } = await renderStream.takeRender(); - - expect(renderedComponents).toStrictEqual([ReadQueryHook]); - expect(snapshot.result).toStrictEqualTyped({ - data: { - greeting: { - __typename: "Greeting", - message: "Hello world", - recipient: { __typename: "Person", name: "Cached Alice" }, - }, - }, - dataState: "streaming", - error: undefined, - networkStatus: NetworkStatus.streaming, - }); - } - - link.simulateResult( - { - result: { - incremental: [ - { - data: { - __typename: "Greeting", - recipient: { name: "Alice", __typename: "Person" }, - }, - path: ["greeting"], - }, - ], - hasNext: false, - }, - }, - true - ); - - { - const { snapshot, renderedComponents } = await renderStream.takeRender(); - - expect(renderedComponents).toStrictEqual([ReadQueryHook]); - expect(snapshot.result).toStrictEqualTyped({ - data: { - greeting: { - __typename: "Greeting", - message: "Hello world", - recipient: { __typename: "Person", name: "Alice" }, - }, - }, - dataState: "complete", - error: undefined, - networkStatus: NetworkStatus.ready, - }); - } - - await expect(renderStream).not.toRerender(); -}); it("throws when calling loadQuery on first render", async () => { // We don't provide this functionality with React 19 anymore since it requires internals access diff --git a/src/react/hooks/__tests__/useLoadableQuery/defer20220824.test.tsx b/src/react/hooks/__tests__/useLoadableQuery/defer20220824.test.tsx new file mode 100644 index 00000000000..51d424a6144 --- /dev/null +++ b/src/react/hooks/__tests__/useLoadableQuery/defer20220824.test.tsx @@ -0,0 +1,436 @@ +import { screen } from "@testing-library/react"; +import type { + AsyncRenderFn, + RenderStream, +} from "@testing-library/react-render-stream"; +import { + createRenderStream, + disableActEnvironment, + useTrackRenders, +} from "@testing-library/react-render-stream"; +import { userEvent } from "@testing-library/user-event"; +import React, { Suspense } from "react"; +import { ErrorBoundary as ReactErrorBoundary } from "react-error-boundary"; + +import type { DataState, TypedDocumentNode } from "@apollo/client"; +import { ApolloClient, gql, NetworkStatus } from "@apollo/client"; +import { InMemoryCache } from "@apollo/client/cache"; +import { Defer20220824Handler } from "@apollo/client/incremental"; +import type { QueryRef } from "@apollo/client/react"; +import { + ApolloProvider, + useLoadableQuery, + useReadQuery, +} from "@apollo/client/react"; +import { MockSubscriptionLink } from "@apollo/client/testing"; +import { renderAsync, spyOnConsole } from "@apollo/client/testing/internal"; +import type { DeepPartial } from "@apollo/client/utilities"; + +function createDefaultProfiler() { + return createRenderStream({ + initialSnapshot: { + error: null as Error | 
null, + result: null as useReadQuery.Result | null, + }, + skipNonTrackingRenders: true, + }); +} + +function createDefaultProfiledComponents< + Snapshot extends { + result: useReadQuery.Result | null; + error?: Error | null; + }, + TData = Snapshot["result"] extends useReadQuery.Result | null ? + TData + : unknown, + TStates extends DataState["dataState"] = Snapshot["result"] extends ( + useReadQuery.Result | null + ) ? + TStates + : "complete" | "streaming", +>(profiler: RenderStream) { + function SuspenseFallback() { + useTrackRenders(); + return

<div>Loading</div>
; + } + + function ReadQueryHook({ + queryRef, + }: { + queryRef: QueryRef; + }) { + useTrackRenders(); + profiler.mergeSnapshot({ + result: useReadQuery(queryRef), + } as unknown as Partial); + + return null; + } + + function ErrorFallback({ error }: { error: Error }) { + useTrackRenders(); + profiler.mergeSnapshot({ error } as Partial); + + return
<div>Oops</div>
; + } + + function ErrorBoundary({ children }: { children: React.ReactNode }) { + return ( + + {children} + + ); + } + + return { + SuspenseFallback, + ReadQueryHook, + ErrorFallback, + ErrorBoundary, + }; +} + +async function renderWithClient( + ui: React.ReactElement, + options: { client: ApolloClient }, + { render: doRender }: { render: AsyncRenderFn | typeof renderAsync } +) { + const { client } = options; + const user = userEvent.setup(); + + const utils = await doRender(ui, { + wrapper: ({ children }: { children: React.ReactNode }) => ( + {children} + ), + }); + + return { ...utils, user }; +} + +test('does not suspend deferred queries with data in the cache and using a "cache-and-network" fetch policy', async () => { + interface Data { + greeting: { + __typename: string; + message: string; + recipient: { name: string; __typename: string }; + }; + } + + const query: TypedDocumentNode> = gql` + query { + greeting { + message + ... @defer { + recipient { + name + } + } + } + } + `; + + const link = new MockSubscriptionLink(); + const cache = new InMemoryCache(); + cache.writeQuery({ + query, + data: { + greeting: { + __typename: "Greeting", + message: "Hello cached", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + }); + const client = new ApolloClient({ + cache, + link, + incrementalHandler: new Defer20220824Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const renderStream = createDefaultProfiler(); + const { SuspenseFallback, ReadQueryHook } = + createDefaultProfiledComponents(renderStream); + + function App() { + useTrackRenders(); + const [loadQuery, queryRef] = useLoadableQuery(query, { + fetchPolicy: "cache-and-network", + }); + return ( +
<div> + <button onClick={() => loadQuery()}>Load todo</button> + <Suspense fallback={<SuspenseFallback />}> + {queryRef && <ReadQueryHook queryRef={queryRef} />} + </Suspense> + </div>
+ ); + } + + const { user } = await renderWithClient( + , + { + client, + }, + { render: renderAsync } + ); + + // initial render + await renderStream.takeRender(); + + await user.click(screen.getByText("Load todo")); + + { + const { snapshot, renderedComponents } = await renderStream.takeRender(); + + expect(renderedComponents).toStrictEqual([App, ReadQueryHook]); + + expect(snapshot.result).toStrictEqualTyped({ + data: { + greeting: { + __typename: "Greeting", + message: "Hello cached", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + dataState: "complete", + error: undefined, + networkStatus: NetworkStatus.loading, + }); + } + + link.simulateResult({ + result: { + data: { + greeting: { __typename: "Greeting", message: "Hello world" }, + }, + hasNext: true, + }, + }); + + { + const { snapshot, renderedComponents } = await renderStream.takeRender(); + + expect(renderedComponents).toStrictEqual([ReadQueryHook]); + expect(snapshot.result).toStrictEqualTyped({ + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + dataState: "streaming", + error: undefined, + networkStatus: NetworkStatus.streaming, + }); + } + + link.simulateResult( + { + result: { + incremental: [ + { + data: { + recipient: { name: "Alice", __typename: "Person" }, + __typename: "Greeting", + }, + path: ["greeting"], + }, + ], + hasNext: false, + }, + }, + true + ); + + { + const { snapshot, renderedComponents } = await renderStream.takeRender(); + + expect(renderedComponents).toStrictEqual([ReadQueryHook]); + expect(snapshot.result).toStrictEqualTyped({ + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Alice" }, + }, + }, + dataState: "complete", + error: undefined, + networkStatus: NetworkStatus.ready, + }); + } + + await expect(renderStream).not.toRerender(); +}); + +test('does not suspend deferred queries with partial data in the cache and using a "cache-first" fetch policy with `returnPartialData`', async () => { + interface QueryData { + greeting: { + __typename: string; + message?: string; + recipient?: { + __typename: string; + name: string; + }; + }; + } + + const query: TypedDocumentNode> = gql` + query { + greeting { + message + ... on Greeting @defer { + recipient { + name + } + } + } + } + `; + + const link = new MockSubscriptionLink(); + const cache = new InMemoryCache(); + + { + // We are intentionally writing partial data to the cache. Supress console + // warnings to avoid unnecessary noise in the test. + using _consoleSpy = spyOnConsole("error"); + + cache.writeQuery({ + query, + data: { + greeting: { + __typename: "Greeting", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + }); + } + + const client = new ApolloClient({ + link, + cache, + incrementalHandler: new Defer20220824Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const renderStream = createDefaultProfiler>(); + const { SuspenseFallback, ReadQueryHook } = + createDefaultProfiledComponents(renderStream); + + function App() { + useTrackRenders(); + const [loadTodo, queryRef] = useLoadableQuery(query, { + fetchPolicy: "cache-first", + returnPartialData: true, + }); + + return ( +
<div> + <button onClick={() => loadTodo()}>Load todo</button> + <Suspense fallback={<SuspenseFallback />}> + {queryRef && <ReadQueryHook queryRef={queryRef} />} + </Suspense> + </div>
+ ); + } + + const { user } = await renderWithClient( + , + { + client, + }, + { render: renderAsync } + ); + + // initial render + await renderStream.takeRender(); + + await user.click(screen.getByText("Load todo")); + + { + const { snapshot, renderedComponents } = await renderStream.takeRender(); + + expect(renderedComponents).toStrictEqual([App, ReadQueryHook]); + expect(snapshot.result).toStrictEqualTyped({ + data: { + greeting: { + __typename: "Greeting", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + dataState: "partial", + error: undefined, + networkStatus: NetworkStatus.loading, + }); + } + + link.simulateResult({ + result: { + data: { + greeting: { message: "Hello world", __typename: "Greeting" }, + }, + hasNext: true, + }, + }); + + { + const { snapshot, renderedComponents } = await renderStream.takeRender(); + + expect(renderedComponents).toStrictEqual([ReadQueryHook]); + expect(snapshot.result).toStrictEqualTyped({ + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + dataState: "streaming", + error: undefined, + networkStatus: NetworkStatus.streaming, + }); + } + + link.simulateResult( + { + result: { + incremental: [ + { + data: { + __typename: "Greeting", + recipient: { name: "Alice", __typename: "Person" }, + }, + path: ["greeting"], + }, + ], + hasNext: false, + }, + }, + true + ); + + { + const { snapshot, renderedComponents } = await renderStream.takeRender(); + + expect(renderedComponents).toStrictEqual([ReadQueryHook]); + expect(snapshot.result).toStrictEqualTyped({ + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Alice" }, + }, + }, + dataState: "complete", + error: undefined, + networkStatus: NetworkStatus.ready, + }); + } + + await expect(renderStream).not.toRerender(); +}); + From 3e0d4deb3842bd8f4d90ee5b7a1c1e8554b4b5a0 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 23:30:04 -0600 Subject: [PATCH 066/254] Move createQueryPreloader tests to own file --- .../__tests__/createQueryPreloader.test.tsx | 90 --------- .../defer20220824.test.tsx | 173 ++++++++++++++++++ 2 files changed, 173 insertions(+), 90 deletions(-) create mode 100644 src/react/query-preloader/__tests__/createQueryPreloader/defer20220824.test.tsx diff --git a/src/react/query-preloader/__tests__/createQueryPreloader.test.tsx b/src/react/query-preloader/__tests__/createQueryPreloader.test.tsx index f05defde306..96f4838e0d3 100644 --- a/src/react/query-preloader/__tests__/createQueryPreloader.test.tsx +++ b/src/react/query-preloader/__tests__/createQueryPreloader.test.tsx @@ -1806,96 +1806,6 @@ test("does not suspend and returns partial data when `returnPartialData` is `tru } }); -test("suspends deferred queries until initial chunk loads then rerenders with deferred data", async () => { - const query = gql` - query { - greeting { - message - ... 
on Greeting @defer { - recipient { - name - } - } - } - } - `; - - const link = new MockSubscriptionLink(); - const client = new ApolloClient({ - cache: new InMemoryCache(), - link, - incrementalHandler: new Defer20220824Handler(), - }); - - const preloadQuery = createQueryPreloader(client); - const queryRef = preloadQuery(query); - - using _disabledAct = disableActEnvironment(); - const { renderStream } = await renderDefaultTestApp({ client, queryRef }); - - { - const { renderedComponents } = await renderStream.takeRender(); - - expect(renderedComponents).toStrictEqual(["App", "SuspenseFallback"]); - } - - link.simulateResult({ - result: { - data: { greeting: { message: "Hello world", __typename: "Greeting" } }, - hasNext: true, - }, - }); - - { - const { snapshot, renderedComponents } = await renderStream.takeRender(); - - expect(renderedComponents).toStrictEqual(["ReadQueryHook"]); - expect(snapshot.result).toStrictEqualTyped({ - data: markAsStreaming({ - greeting: { message: "Hello world", __typename: "Greeting" }, - }), - dataState: "streaming", - error: undefined, - networkStatus: NetworkStatus.streaming, - }); - } - - link.simulateResult( - { - result: { - incremental: [ - { - data: { - recipient: { name: "Alice", __typename: "Person" }, - __typename: "Greeting", - }, - path: ["greeting"], - }, - ], - hasNext: false, - }, - }, - true - ); - - { - const { snapshot, renderedComponents } = await renderStream.takeRender(); - - expect(renderedComponents).toStrictEqual(["ReadQueryHook"]); - expect(snapshot.result).toStrictEqualTyped({ - data: { - greeting: { - __typename: "Greeting", - message: "Hello world", - recipient: { __typename: "Person", name: "Alice" }, - }, - }, - dataState: "complete", - error: undefined, - networkStatus: NetworkStatus.ready, - }); - } -}); test("masks result when dataMasking is `true`", async () => { const { query, mocks } = setupMaskedVariablesCase(); diff --git a/src/react/query-preloader/__tests__/createQueryPreloader/defer20220824.test.tsx b/src/react/query-preloader/__tests__/createQueryPreloader/defer20220824.test.tsx new file mode 100644 index 00000000000..ca4b7f09998 --- /dev/null +++ b/src/react/query-preloader/__tests__/createQueryPreloader/defer20220824.test.tsx @@ -0,0 +1,173 @@ +import { + createRenderStream, + disableActEnvironment, + useTrackRenders, +} from "@testing-library/react-render-stream"; +import React, { Suspense } from "react"; +import { ErrorBoundary } from "react-error-boundary"; + +import type { DataState } from "@apollo/client"; +import { ApolloClient, gql, NetworkStatus } from "@apollo/client"; +import { InMemoryCache } from "@apollo/client/cache"; +import { Defer20220824Handler } from "@apollo/client/incremental"; +import type { QueryRef } from "@apollo/client/react"; +import { + ApolloProvider, + createQueryPreloader, + useReadQuery, +} from "@apollo/client/react"; +import { MockSubscriptionLink } from "@apollo/client/testing"; +import { markAsStreaming } from "@apollo/client/testing/internal"; + +async function renderDefaultTestApp< + TData, + TStates extends DataState["dataState"] = "complete" | "streaming", +>({ + client, + queryRef, +}: { + client: ApolloClient; + queryRef: QueryRef; +}) { + const renderStream = createRenderStream({ + initialSnapshot: { + result: null as useReadQuery.Result | null, + error: null as Error | null, + }, + }); + + function ReadQueryHook() { + useTrackRenders({ name: "ReadQueryHook" }); + renderStream.mergeSnapshot({ result: useReadQuery(queryRef) }); + + return null; + } + + function 
SuspenseFallback() { + useTrackRenders({ name: "SuspenseFallback" }); + return

<div>Loading</div>
; + } + + function ErrorFallback({ error }: { error: Error }) { + useTrackRenders({ name: "ErrorFallback" }); + renderStream.mergeSnapshot({ error }); + + return null; + } + + function App() { + useTrackRenders({ name: "App" }); + + return ( + + }> + + + + ); + } + + const utils = await renderStream.render(, { + wrapper: ({ children }) => ( + {children} + ), + }); + + function rerender() { + return utils.rerender(); + } + + return { ...utils, rerender, renderStream }; +} + +test("suspends deferred queries until initial chunk loads then rerenders with deferred data", async () => { + const query = gql` + query { + greeting { + message + ... on Greeting @defer { + recipient { + name + } + } + } + } + `; + + const link = new MockSubscriptionLink(); + const client = new ApolloClient({ + cache: new InMemoryCache(), + link, + incrementalHandler: new Defer20220824Handler(), + }); + + const preloadQuery = createQueryPreloader(client); + const queryRef = preloadQuery(query); + + using _disabledAct = disableActEnvironment(); + const { renderStream } = await renderDefaultTestApp({ client, queryRef }); + + { + const { renderedComponents } = await renderStream.takeRender(); + + expect(renderedComponents).toStrictEqual(["App", "SuspenseFallback"]); + } + + link.simulateResult({ + result: { + data: { greeting: { message: "Hello world", __typename: "Greeting" } }, + hasNext: true, + }, + }); + + { + const { snapshot, renderedComponents } = await renderStream.takeRender(); + + expect(renderedComponents).toStrictEqual(["ReadQueryHook"]); + expect(snapshot.result).toStrictEqualTyped({ + data: markAsStreaming({ + greeting: { message: "Hello world", __typename: "Greeting" }, + }), + dataState: "streaming", + error: undefined, + networkStatus: NetworkStatus.streaming, + }); + } + + link.simulateResult( + { + result: { + incremental: [ + { + data: { + recipient: { name: "Alice", __typename: "Person" }, + __typename: "Greeting", + }, + path: ["greeting"], + }, + ], + hasNext: false, + }, + }, + true + ); + + { + const { snapshot, renderedComponents } = await renderStream.takeRender(); + + expect(renderedComponents).toStrictEqual(["ReadQueryHook"]); + expect(snapshot.result).toStrictEqualTyped({ + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Alice" }, + }, + }, + dataState: "complete", + error: undefined, + networkStatus: NetworkStatus.ready, + }); + } +}); + From 6b0e0027b45b83b7d08cba5f23d4198ff5d927b7 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 4 Sep 2025 23:36:10 -0600 Subject: [PATCH 067/254] Use defer helpers instead of mock subscription link --- .../useLoadableQuery/defer20220824.test.tsx | 94 ++++++------ .../useMutation/defer20220824.test.tsx | 140 ++++++++---------- .../defer20220824.test.tsx | 47 +++--- 3 files changed, 124 insertions(+), 157 deletions(-) diff --git a/src/react/hooks/__tests__/useLoadableQuery/defer20220824.test.tsx b/src/react/hooks/__tests__/useLoadableQuery/defer20220824.test.tsx index 51d424a6144..f27a4a14123 100644 --- a/src/react/hooks/__tests__/useLoadableQuery/defer20220824.test.tsx +++ b/src/react/hooks/__tests__/useLoadableQuery/defer20220824.test.tsx @@ -22,8 +22,11 @@ import { useLoadableQuery, useReadQuery, } from "@apollo/client/react"; -import { MockSubscriptionLink } from "@apollo/client/testing"; -import { renderAsync, spyOnConsole } from "@apollo/client/testing/internal"; +import { + mockDefer20220824, + renderAsync, + spyOnConsole, +} from "@apollo/client/testing/internal"; import type 
{ DeepPartial } from "@apollo/client/utilities"; function createDefaultProfiler() { @@ -130,7 +133,9 @@ test('does not suspend deferred queries with data in the cache and using a "cach } `; - const link = new MockSubscriptionLink(); + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDefer20220824(); + const cache = new InMemoryCache(); cache.writeQuery({ query, @@ -144,7 +149,7 @@ test('does not suspend deferred queries with data in the cache and using a "cach }); const client = new ApolloClient({ cache, - link, + link: httpLink, incrementalHandler: new Defer20220824Handler(), }); @@ -200,13 +205,11 @@ test('does not suspend deferred queries with data in the cache and using a "cach }); } - link.simulateResult({ - result: { - data: { - greeting: { __typename: "Greeting", message: "Hello world" }, - }, - hasNext: true, + enqueueInitialChunk({ + data: { + greeting: { __typename: "Greeting", message: "Hello world" }, }, + hasNext: true, }); { @@ -227,23 +230,18 @@ test('does not suspend deferred queries with data in the cache and using a "cach }); } - link.simulateResult( - { - result: { - incremental: [ - { - data: { - recipient: { name: "Alice", __typename: "Person" }, - __typename: "Greeting", - }, - path: ["greeting"], - }, - ], - hasNext: false, + enqueueSubsequentChunk({ + incremental: [ + { + data: { + recipient: { name: "Alice", __typename: "Person" }, + __typename: "Greeting", + }, + path: ["greeting"], }, - }, - true - ); + ], + hasNext: false, + }); { const { snapshot, renderedComponents } = await renderStream.takeRender(); @@ -291,7 +289,9 @@ test('does not suspend deferred queries with partial data in the cache and using } `; - const link = new MockSubscriptionLink(); + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDefer20220824(); + const cache = new InMemoryCache(); { @@ -311,7 +311,7 @@ test('does not suspend deferred queries with partial data in the cache and using } const client = new ApolloClient({ - link, + link: httpLink, cache, incrementalHandler: new Defer20220824Handler(), }); @@ -368,13 +368,11 @@ test('does not suspend deferred queries with partial data in the cache and using }); } - link.simulateResult({ - result: { - data: { - greeting: { message: "Hello world", __typename: "Greeting" }, - }, - hasNext: true, + enqueueInitialChunk({ + data: { + greeting: { message: "Hello world", __typename: "Greeting" }, }, + hasNext: true, }); { @@ -395,23 +393,18 @@ test('does not suspend deferred queries with partial data in the cache and using }); } - link.simulateResult( - { - result: { - incremental: [ - { - data: { - __typename: "Greeting", - recipient: { name: "Alice", __typename: "Person" }, - }, - path: ["greeting"], - }, - ], - hasNext: false, + enqueueSubsequentChunk({ + incremental: [ + { + data: { + __typename: "Greeting", + recipient: { name: "Alice", __typename: "Person" }, + }, + path: ["greeting"], }, - }, - true - ); + ], + hasNext: false, + }); { const { snapshot, renderedComponents } = await renderStream.takeRender(); @@ -433,4 +426,3 @@ test('does not suspend deferred queries with partial data in the cache and using await expect(renderStream).not.toRerender(); }); - diff --git a/src/react/hooks/__tests__/useMutation/defer20220824.test.tsx b/src/react/hooks/__tests__/useMutation/defer20220824.test.tsx index 45b046eefd6..42ebb4e9cde 100644 --- a/src/react/hooks/__tests__/useMutation/defer20220824.test.tsx +++ b/src/react/hooks/__tests__/useMutation/defer20220824.test.tsx @@ -8,9 +8,9 @@ import { ApolloClient, 
CombinedGraphQLErrors } from "@apollo/client"; import { InMemoryCache } from "@apollo/client/cache"; import { Defer20220824Handler } from "@apollo/client/incremental"; import { useMutation } from "@apollo/client/react"; -import { MockSubscriptionLink } from "@apollo/client/testing"; import { createClientWrapper, + mockDefer20220824, spyOnConsole, } from "@apollo/client/testing/internal"; @@ -33,10 +33,11 @@ test("resolves a deferred mutation with the full result", async () => { description: "Get milk!", }; - const link = new MockSubscriptionLink(); + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDefer20220824(); const client = new ApolloClient({ - link, + link: httpLink, cache: new InMemoryCache(), incrementalHandler: new Defer20220824Handler(), }); @@ -73,41 +74,30 @@ test("resolves a deferred mutation with the full result", async () => { }); } - setTimeout(() => { - link.simulateResult({ - result: { - data: { - createTodo: { - id: 1, - __typename: "Todo", - }, - }, - hasNext: true, + enqueueInitialChunk({ + data: { + createTodo: { + id: 1, + __typename: "Todo", }, - }); + }, + hasNext: true, }); await expect(takeSnapshot).not.toRerender(); - setTimeout(() => { - link.simulateResult( + enqueueSubsequentChunk({ + incremental: [ { - result: { - incremental: [ - { - data: { - description: "Get milk!", - priority: "High", - __typename: "Todo", - }, - path: ["createTodo"], - }, - ], - hasNext: false, + data: { + description: "Get milk!", + priority: "High", + __typename: "Todo", }, + path: ["createTodo"], }, - true - ); + ], + hasNext: false, }); { @@ -159,10 +149,11 @@ test("resolves with resulting errors and calls onError callback", async () => { description: "Get milk!", }; - const link = new MockSubscriptionLink(); + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDefer20220824(); const client = new ApolloClient({ - link, + link: httpLink, cache: new InMemoryCache(), incrementalHandler: new Defer20220824Handler(), }); @@ -202,35 +193,28 @@ test("resolves with resulting errors and calls onError callback", async () => { }); } - link.simulateResult({ - result: { - data: { - createTodo: { - id: 1, - __typename: "Todo", - }, + enqueueInitialChunk({ + data: { + createTodo: { + id: 1, + __typename: "Todo", }, - hasNext: true, }, + hasNext: true, }); await expect(takeSnapshot).not.toRerender(); - link.simulateResult( - { - result: { - incremental: [ - { - data: null, - errors: [{ message: CREATE_TODO_ERROR }], - path: ["createTodo"], - }, - ], - hasNext: false, + enqueueSubsequentChunk({ + incremental: [ + { + data: null, + errors: [{ message: CREATE_TODO_ERROR }], + path: ["createTodo"], }, - }, - true - ); + ], + hasNext: false, + }); await expect(promise).rejects.toThrow( new CombinedGraphQLErrors({ errors: [{ message: CREATE_TODO_ERROR }] }) @@ -280,10 +264,11 @@ test("calls the update function with the final merged result data", async () => description: "Get milk!", }; - const link = new MockSubscriptionLink(); + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDefer20220824(); const update = jest.fn(); const client = new ApolloClient({ - link, + link: httpLink, cache: new InMemoryCache(), incrementalHandler: new Defer20220824Handler(), }); @@ -322,38 +307,31 @@ test("calls the update function with the final merged result data", async () => }); } - link.simulateResult({ - result: { - data: { - createTodo: { - id: 1, - __typename: "Todo", - }, + enqueueInitialChunk({ + data: { + createTodo: { + id: 1, + __typename: 
"Todo", }, - hasNext: true, }, + hasNext: true, }); await expect(takeSnapshot).not.toRerender(); - link.simulateResult( - { - result: { - incremental: [ - { - data: { - description: "Get milk!", - priority: "High", - __typename: "Todo", - }, - path: ["createTodo"], - }, - ], - hasNext: false, + enqueueSubsequentChunk({ + incremental: [ + { + data: { + description: "Get milk!", + priority: "High", + __typename: "Todo", + }, + path: ["createTodo"], }, - }, - true - ); + ], + hasNext: false, + }); await expect(promiseReturnedByMutate).resolves.toStrictEqualTyped({ data: { diff --git a/src/react/query-preloader/__tests__/createQueryPreloader/defer20220824.test.tsx b/src/react/query-preloader/__tests__/createQueryPreloader/defer20220824.test.tsx index ca4b7f09998..196afa27533 100644 --- a/src/react/query-preloader/__tests__/createQueryPreloader/defer20220824.test.tsx +++ b/src/react/query-preloader/__tests__/createQueryPreloader/defer20220824.test.tsx @@ -16,8 +16,10 @@ import { createQueryPreloader, useReadQuery, } from "@apollo/client/react"; -import { MockSubscriptionLink } from "@apollo/client/testing"; -import { markAsStreaming } from "@apollo/client/testing/internal"; +import { + markAsStreaming, + mockDefer20220824, +} from "@apollo/client/testing/internal"; async function renderDefaultTestApp< TData, @@ -94,10 +96,12 @@ test("suspends deferred queries until initial chunk loads then rerenders with de } `; - const link = new MockSubscriptionLink(); + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDefer20220824(); + const client = new ApolloClient({ cache: new InMemoryCache(), - link, + link: httpLink, incrementalHandler: new Defer20220824Handler(), }); @@ -113,11 +117,9 @@ test("suspends deferred queries until initial chunk loads then rerenders with de expect(renderedComponents).toStrictEqual(["App", "SuspenseFallback"]); } - link.simulateResult({ - result: { - data: { greeting: { message: "Hello world", __typename: "Greeting" } }, - hasNext: true, - }, + enqueueInitialChunk({ + data: { greeting: { message: "Hello world", __typename: "Greeting" } }, + hasNext: true, }); { @@ -134,23 +136,18 @@ test("suspends deferred queries until initial chunk loads then rerenders with de }); } - link.simulateResult( - { - result: { - incremental: [ - { - data: { - recipient: { name: "Alice", __typename: "Person" }, - __typename: "Greeting", - }, - path: ["greeting"], - }, - ], - hasNext: false, + enqueueSubsequentChunk({ + incremental: [ + { + data: { + recipient: { name: "Alice", __typename: "Person" }, + __typename: "Greeting", + }, + path: ["greeting"], }, - }, - true - ); + ], + hasNext: false, + }); { const { snapshot, renderedComponents } = await renderStream.takeRender(); From 560824e5b9953b8070733f88697d934e976d7330 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Fri, 5 Sep 2025 00:09:20 -0600 Subject: [PATCH 068/254] Use render helper in useLoadableQuery similar to other tests --- .../useLoadableQuery/defer20220824.test.tsx | 276 ++++++++---------- 1 file changed, 125 insertions(+), 151 deletions(-) diff --git a/src/react/hooks/__tests__/useLoadableQuery/defer20220824.test.tsx b/src/react/hooks/__tests__/useLoadableQuery/defer20220824.test.tsx index f27a4a14123..26a07ede75c 100644 --- a/src/react/hooks/__tests__/useLoadableQuery/defer20220824.test.tsx +++ b/src/react/hooks/__tests__/useLoadableQuery/defer20220824.test.tsx @@ -1,114 +1,113 @@ -import { screen } from "@testing-library/react"; -import type { - AsyncRenderFn, - RenderStream, -} from 
"@testing-library/react-render-stream"; +import type { RenderOptions } from "@testing-library/react"; import { createRenderStream, disableActEnvironment, useTrackRenders, } from "@testing-library/react-render-stream"; -import { userEvent } from "@testing-library/user-event"; import React, { Suspense } from "react"; -import { ErrorBoundary as ReactErrorBoundary } from "react-error-boundary"; +import { ErrorBoundary } from "react-error-boundary"; -import type { DataState, TypedDocumentNode } from "@apollo/client"; +import type { + DataState, + ErrorLike, + OperationVariables, + TypedDocumentNode, +} from "@apollo/client"; import { ApolloClient, gql, NetworkStatus } from "@apollo/client"; import { InMemoryCache } from "@apollo/client/cache"; import { Defer20220824Handler } from "@apollo/client/incremental"; import type { QueryRef } from "@apollo/client/react"; +import { useLoadableQuery, useReadQuery } from "@apollo/client/react"; import { - ApolloProvider, - useLoadableQuery, - useReadQuery, -} from "@apollo/client/react"; -import { + createClientWrapper, mockDefer20220824, - renderAsync, spyOnConsole, } from "@apollo/client/testing/internal"; -import type { DeepPartial } from "@apollo/client/utilities"; +import { invariant } from "@apollo/client/utilities/invariant"; + +async function renderHook< + TData, + TVariables extends OperationVariables, + TStates extends DataState["dataState"] = DataState["dataState"], + Props = never, +>( + renderHook: ( + props: Props extends never ? undefined : Props + ) => useLoadableQuery.Result, + options: Pick & { initialProps?: Props } +) { + function UseReadQuery({ + queryRef, + }: { + queryRef: QueryRef; + }) { + useTrackRenders({ name: "useReadQuery" }); + mergeSnapshot({ result: useReadQuery(queryRef) }); -function createDefaultProfiler() { - return createRenderStream({ - initialSnapshot: { - error: null as Error | null, - result: null as useReadQuery.Result | null, - }, - skipNonTrackingRenders: true, - }); -} + return null; + } -function createDefaultProfiledComponents< - Snapshot extends { - result: useReadQuery.Result | null; - error?: Error | null; - }, - TData = Snapshot["result"] extends useReadQuery.Result | null ? - TData - : unknown, - TStates extends DataState["dataState"] = Snapshot["result"] extends ( - useReadQuery.Result | null - ) ? - TStates - : "complete" | "streaming", ->(profiler: RenderStream) { function SuspenseFallback() { - useTrackRenders(); - return

<div>Loading</div>
; + useTrackRenders({ name: "SuspenseFallback" }); + + return null; } - function ReadQueryHook({ - queryRef, - }: { - queryRef: QueryRef; - }) { - useTrackRenders(); - profiler.mergeSnapshot({ - result: useReadQuery(queryRef), - } as unknown as Partial); + function ErrorFallback() { + useTrackRenders({ name: "ErrorBoundary" }); return null; } - function ErrorFallback({ error }: { error: Error }) { - useTrackRenders(); - profiler.mergeSnapshot({ error } as Partial); + function App({ props }: { props: Props | undefined }) { + useTrackRenders({ name: "useLoadableQuery" }); + const [loadQuery, queryRef] = renderHook(props as any); - return
<div>Oops</div>
; - } + mergeSnapshot({ loadQuery }); - function ErrorBoundary({ children }: { children: React.ReactNode }) { return ( - - {children} - + }> + replaceSnapshot({ error })} + > + {queryRef && } + + ); } - return { - SuspenseFallback, - ReadQueryHook, - ErrorFallback, - ErrorBoundary, - }; -} + const { + render, + getCurrentRender, + takeRender, + mergeSnapshot, + replaceSnapshot, + } = createRenderStream< + | { + loadQuery: useLoadableQuery.LoadQueryFunction; + result?: useReadQuery.Result; + } + | { error: ErrorLike } + >({ initialSnapshot: { loadQuery: null as any } }); -async function renderWithClient( - ui: React.ReactElement, - options: { client: ApolloClient }, - { render: doRender }: { render: AsyncRenderFn | typeof renderAsync } -) { - const { client } = options; - const user = userEvent.setup(); + const utils = await render(, options); - const utils = await doRender(ui, { - wrapper: ({ children }: { children: React.ReactNode }) => ( - {children} - ), - }); + function rerender(props: Props) { + return utils.rerender(); + } - return { ...utils, user }; + function getCurrentSnapshot() { + const { snapshot } = getCurrentRender(); + invariant( + "loadQuery" in snapshot, + "Expected rendered hook instead of error boundary" + ); + + return snapshot; + } + + return { takeRender, rerender, getCurrentSnapshot }; } test('does not suspend deferred queries with data in the cache and using a "cache-and-network" fetch policy', async () => { @@ -154,43 +153,27 @@ test('does not suspend deferred queries with data in the cache and using a "cach }); using _disabledAct = disableActEnvironment(); - const renderStream = createDefaultProfiler(); - const { SuspenseFallback, ReadQueryHook } = - createDefaultProfiledComponents(renderStream); - - function App() { - useTrackRenders(); - const [loadQuery, queryRef] = useLoadableQuery(query, { - fetchPolicy: "cache-and-network", - }); - return ( -
<div> - <button onClick={() => loadQuery()}>Load todo</button> - <Suspense fallback={<SuspenseFallback />}> - {queryRef && <ReadQueryHook queryRef={queryRef} />} - </Suspense> - </div>
- ); - } - - const { user } = await renderWithClient( - , - { - client, - }, - { render: renderAsync } + const { takeRender, getCurrentSnapshot } = await renderHook( + () => useLoadableQuery(query, { fetchPolicy: "cache-and-network" }), + { wrapper: createClientWrapper(client) } ); - // initial render - await renderStream.takeRender(); + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useLoadableQuery"]); + } - await user.click(screen.getByText("Load todo")); + getCurrentSnapshot().loadQuery(); { - const { snapshot, renderedComponents } = await renderStream.takeRender(); - - expect(renderedComponents).toStrictEqual([App, ReadQueryHook]); + const { snapshot, renderedComponents } = await takeRender(); + invariant("result" in snapshot); + expect(renderedComponents).toStrictEqual([ + "useLoadableQuery", + "useReadQuery", + ]); expect(snapshot.result).toStrictEqualTyped({ data: { greeting: { @@ -213,9 +196,10 @@ test('does not suspend deferred queries with data in the cache and using a "cach }); { - const { snapshot, renderedComponents } = await renderStream.takeRender(); + const { snapshot, renderedComponents } = await takeRender(); - expect(renderedComponents).toStrictEqual([ReadQueryHook]); + invariant("result" in snapshot); + expect(renderedComponents).toStrictEqual(["useReadQuery"]); expect(snapshot.result).toStrictEqualTyped({ data: { greeting: { @@ -244,9 +228,10 @@ test('does not suspend deferred queries with data in the cache and using a "cach }); { - const { snapshot, renderedComponents } = await renderStream.takeRender(); + const { snapshot, renderedComponents } = await takeRender(); - expect(renderedComponents).toStrictEqual([ReadQueryHook]); + invariant("result" in snapshot); + expect(renderedComponents).toStrictEqual(["useReadQuery"]); expect(snapshot.result).toStrictEqualTyped({ data: { greeting: { @@ -261,7 +246,7 @@ test('does not suspend deferred queries with data in the cache and using a "cach }); } - await expect(renderStream).not.toRerender(); + await expect(takeRender).not.toRerender(); }); test('does not suspend deferred queries with partial data in the cache and using a "cache-first" fetch policy with `returnPartialData`', async () => { @@ -317,44 +302,31 @@ test('does not suspend deferred queries with partial data in the cache and using }); using _disabledAct = disableActEnvironment(); - const renderStream = createDefaultProfiler>(); - const { SuspenseFallback, ReadQueryHook } = - createDefaultProfiledComponents(renderStream); - - function App() { - useTrackRenders(); - const [loadTodo, queryRef] = useLoadableQuery(query, { - fetchPolicy: "cache-first", - returnPartialData: true, - }); - - return ( -
-      <div>
-        <button onClick={() => loadTodo()}>Load todo</button>
-        <Suspense fallback={<SuspenseFallback />}>
-          {queryRef && <ReadQueryHook queryRef={queryRef} />}
-        </Suspense>
-      </div>
- ); - } - - const { user } = await renderWithClient( - , - { - client, - }, - { render: renderAsync } + const { takeRender, getCurrentSnapshot } = await renderHook( + () => + useLoadableQuery(query, { + fetchPolicy: "cache-first", + returnPartialData: true, + }), + { wrapper: createClientWrapper(client) } ); - // initial render - await renderStream.takeRender(); + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useLoadableQuery"]); + } - await user.click(screen.getByText("Load todo")); + getCurrentSnapshot().loadQuery(); { - const { snapshot, renderedComponents } = await renderStream.takeRender(); + const { snapshot, renderedComponents } = await takeRender(); - expect(renderedComponents).toStrictEqual([App, ReadQueryHook]); + invariant("result" in snapshot); + expect(renderedComponents).toStrictEqual([ + "useLoadableQuery", + "useReadQuery", + ]); expect(snapshot.result).toStrictEqualTyped({ data: { greeting: { @@ -376,9 +348,10 @@ test('does not suspend deferred queries with partial data in the cache and using }); { - const { snapshot, renderedComponents } = await renderStream.takeRender(); + const { snapshot, renderedComponents } = await takeRender(); - expect(renderedComponents).toStrictEqual([ReadQueryHook]); + invariant("result" in snapshot); + expect(renderedComponents).toStrictEqual(["useReadQuery"]); expect(snapshot.result).toStrictEqualTyped({ data: { greeting: { @@ -407,9 +380,10 @@ test('does not suspend deferred queries with partial data in the cache and using }); { - const { snapshot, renderedComponents } = await renderStream.takeRender(); + const { snapshot, renderedComponents } = await takeRender(); - expect(renderedComponents).toStrictEqual([ReadQueryHook]); + invariant("result" in snapshot); + expect(renderedComponents).toStrictEqual(["useReadQuery"]); expect(snapshot.result).toStrictEqualTyped({ data: { greeting: { @@ -424,5 +398,5 @@ test('does not suspend deferred queries with partial data in the cache and using }); } - await expect(renderStream).not.toRerender(); + await expect(takeRender).not.toRerender(); }); From fe187416733f813f0f844560a5e572120ae2b7ee Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Fri, 5 Sep 2025 00:09:39 -0600 Subject: [PATCH 069/254] Remove unused import --- src/react/hooks/__tests__/useLoadableQuery.test.tsx | 3 --- 1 file changed, 3 deletions(-) diff --git a/src/react/hooks/__tests__/useLoadableQuery.test.tsx b/src/react/hooks/__tests__/useLoadableQuery.test.tsx index 1a49e9dca1c..bcd4be56365 100644 --- a/src/react/hooks/__tests__/useLoadableQuery.test.tsx +++ b/src/react/hooks/__tests__/useLoadableQuery.test.tsx @@ -32,7 +32,6 @@ import { NetworkStatus, } from "@apollo/client"; import { InMemoryCache } from "@apollo/client/cache"; -import { Defer20220824Handler } from "@apollo/client/incremental"; import type { QueryRef } from "@apollo/client/react"; import { ApolloProvider, @@ -1531,7 +1530,6 @@ it("works with startTransition to change variables", async () => { }); }); - it("reacts to cache updates", async () => { const { query, mocks } = useSimpleQueryCase(); const client = new ApolloClient({ @@ -4396,7 +4394,6 @@ it('suspends and does not use partial data when changing variables and using a " } }); - it("throws when calling loadQuery on first render", async () => { // We don't provide this functionality with React 19 anymore since it requires internals access if (IS_REACT_19) return; From 01d43251288c41be0ac73b78dcbc0fbcca8715b0 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Fri, 
5 Sep 2025 00:12:48 -0600 Subject: [PATCH 070/254] Add a useLoadableQuery test suite for deferGraphQL17Alpha9 --- .../deferGraphQL17Alpha9.test.tsx | 406 ++++++++++++++++++ 1 file changed, 406 insertions(+) create mode 100644 src/react/hooks/__tests__/useLoadableQuery/deferGraphQL17Alpha9.test.tsx diff --git a/src/react/hooks/__tests__/useLoadableQuery/deferGraphQL17Alpha9.test.tsx b/src/react/hooks/__tests__/useLoadableQuery/deferGraphQL17Alpha9.test.tsx new file mode 100644 index 00000000000..d43bdb16e73 --- /dev/null +++ b/src/react/hooks/__tests__/useLoadableQuery/deferGraphQL17Alpha9.test.tsx @@ -0,0 +1,406 @@ +import type { RenderOptions } from "@testing-library/react"; +import { + createRenderStream, + disableActEnvironment, + useTrackRenders, +} from "@testing-library/react-render-stream"; +import React, { Suspense } from "react"; +import { ErrorBoundary } from "react-error-boundary"; + +import type { + DataState, + ErrorLike, + OperationVariables, + TypedDocumentNode, +} from "@apollo/client"; +import { ApolloClient, gql, NetworkStatus } from "@apollo/client"; +import { InMemoryCache } from "@apollo/client/cache"; +import { GraphQL17Alpha9Handler } from "@apollo/client/incremental"; +import type { QueryRef } from "@apollo/client/react"; +import { useLoadableQuery, useReadQuery } from "@apollo/client/react"; +import { + createClientWrapper, + mockDeferStreamGraphQL17Alpha9, + spyOnConsole, +} from "@apollo/client/testing/internal"; +import { invariant } from "@apollo/client/utilities/invariant"; + +async function renderHook< + TData, + TVariables extends OperationVariables, + TStates extends DataState["dataState"] = DataState["dataState"], + Props = never, +>( + renderHook: ( + props: Props extends never ? undefined : Props + ) => useLoadableQuery.Result, + options: Pick & { initialProps?: Props } +) { + function UseReadQuery({ + queryRef, + }: { + queryRef: QueryRef; + }) { + useTrackRenders({ name: "useReadQuery" }); + mergeSnapshot({ result: useReadQuery(queryRef) }); + + return null; + } + + function SuspenseFallback() { + useTrackRenders({ name: "SuspenseFallback" }); + + return null; + } + + function ErrorFallback() { + useTrackRenders({ name: "ErrorBoundary" }); + + return null; + } + + function App({ props }: { props: Props | undefined }) { + useTrackRenders({ name: "useLoadableQuery" }); + const [loadQuery, queryRef] = renderHook(props as any); + + mergeSnapshot({ loadQuery }); + + return ( + }> + replaceSnapshot({ error })} + > + {queryRef && } + + + ); + } + + const { + render, + getCurrentRender, + takeRender, + mergeSnapshot, + replaceSnapshot, + } = createRenderStream< + | { + loadQuery: useLoadableQuery.LoadQueryFunction; + result?: useReadQuery.Result; + } + | { error: ErrorLike } + >({ initialSnapshot: { loadQuery: null as any } }); + + const utils = await render(, options); + + function rerender(props: Props) { + return utils.rerender(); + } + + function getCurrentSnapshot() { + const { snapshot } = getCurrentRender(); + invariant( + "loadQuery" in snapshot, + "Expected rendered hook instead of error boundary" + ); + + return snapshot; + } + + return { takeRender, rerender, getCurrentSnapshot }; +} + +test('does not suspend deferred queries with data in the cache and using a "cache-and-network" fetch policy', async () => { + interface Data { + greeting: { + __typename: string; + message: string; + recipient: { name: string; __typename: string }; + }; + } + + const query: TypedDocumentNode> = gql` + query { + greeting { + message + ... 
@defer { + recipient { + name + } + } + } + } + `; + + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDeferStreamGraphQL17Alpha9(); + + const cache = new InMemoryCache(); + cache.writeQuery({ + query, + data: { + greeting: { + __typename: "Greeting", + message: "Hello cached", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + }); + const client = new ApolloClient({ + cache, + link: httpLink, + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender, getCurrentSnapshot } = await renderHook( + () => useLoadableQuery(query, { fetchPolicy: "cache-and-network" }), + { wrapper: createClientWrapper(client) } + ); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useLoadableQuery"]); + } + + getCurrentSnapshot().loadQuery(); + + { + const { snapshot, renderedComponents } = await takeRender(); + + invariant("result" in snapshot); + expect(renderedComponents).toStrictEqual([ + "useLoadableQuery", + "useReadQuery", + ]); + expect(snapshot.result).toStrictEqualTyped({ + data: { + greeting: { + __typename: "Greeting", + message: "Hello cached", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + dataState: "complete", + error: undefined, + networkStatus: NetworkStatus.loading, + }); + } + + enqueueInitialChunk({ + data: { + greeting: { __typename: "Greeting", message: "Hello world" }, + }, + pending: [{ id: "0", path: ["greeting"] }], + hasNext: true, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + invariant("result" in snapshot); + expect(renderedComponents).toStrictEqual(["useReadQuery"]); + expect(snapshot.result).toStrictEqualTyped({ + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + dataState: "streaming", + error: undefined, + networkStatus: NetworkStatus.streaming, + }); + } + + enqueueSubsequentChunk({ + incremental: [ + { + data: { + recipient: { name: "Alice", __typename: "Person" }, + __typename: "Greeting", + }, + id: "0", + }, + ], + completed: [{ id: "0" }], + hasNext: false, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + invariant("result" in snapshot); + expect(renderedComponents).toStrictEqual(["useReadQuery"]); + expect(snapshot.result).toStrictEqualTyped({ + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Alice" }, + }, + }, + dataState: "complete", + error: undefined, + networkStatus: NetworkStatus.ready, + }); + } + + await expect(takeRender).not.toRerender(); +}); + +test('does not suspend deferred queries with partial data in the cache and using a "cache-first" fetch policy with `returnPartialData`', async () => { + interface QueryData { + greeting: { + __typename: string; + message?: string; + recipient?: { + __typename: string; + name: string; + }; + }; + } + + const query: TypedDocumentNode> = gql` + query { + greeting { + message + ... on Greeting @defer { + recipient { + name + } + } + } + } + `; + + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDeferStreamGraphQL17Alpha9(); + + const cache = new InMemoryCache(); + + { + // We are intentionally writing partial data to the cache. Supress console + // warnings to avoid unnecessary noise in the test. 
+ using _consoleSpy = spyOnConsole("error"); + + cache.writeQuery({ + query, + data: { + greeting: { + __typename: "Greeting", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + }); + } + + const client = new ApolloClient({ + link: httpLink, + cache, + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender, getCurrentSnapshot } = await renderHook( + () => + useLoadableQuery(query, { + fetchPolicy: "cache-first", + returnPartialData: true, + }), + { wrapper: createClientWrapper(client) } + ); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useLoadableQuery"]); + } + + getCurrentSnapshot().loadQuery(); + + { + const { snapshot, renderedComponents } = await takeRender(); + + invariant("result" in snapshot); + expect(renderedComponents).toStrictEqual([ + "useLoadableQuery", + "useReadQuery", + ]); + expect(snapshot.result).toStrictEqualTyped({ + data: { + greeting: { + __typename: "Greeting", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + dataState: "partial", + error: undefined, + networkStatus: NetworkStatus.loading, + }); + } + + enqueueInitialChunk({ + data: { + greeting: { message: "Hello world", __typename: "Greeting" }, + }, + pending: [{ id: "0", path: ["greeting"] }], + hasNext: true, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + invariant("result" in snapshot); + expect(renderedComponents).toStrictEqual(["useReadQuery"]); + expect(snapshot.result).toStrictEqualTyped({ + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + dataState: "streaming", + error: undefined, + networkStatus: NetworkStatus.streaming, + }); + } + + enqueueSubsequentChunk({ + incremental: [ + { + data: { + __typename: "Greeting", + recipient: { name: "Alice", __typename: "Person" }, + }, + id: "0", + }, + ], + completed: [{ id: "0" }], + hasNext: false, + }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + invariant("result" in snapshot); + expect(renderedComponents).toStrictEqual(["useReadQuery"]); + expect(snapshot.result).toStrictEqualTyped({ + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Alice" }, + }, + }, + dataState: "complete", + error: undefined, + networkStatus: NetworkStatus.ready, + }); + } + + await expect(takeRender).not.toRerender(); +}); \ No newline at end of file From 6e4ad159f1e7e0f2d4d8338c288f0d3015b00e39 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Fri, 5 Sep 2025 00:14:33 -0600 Subject: [PATCH 071/254] Add a createQueryPreloader test suite for deferGraphQL17Alpha9 --- .../deferGraphQL17Alpha9.test.tsx | 171 ++++++++++++++++++ 1 file changed, 171 insertions(+) create mode 100644 src/react/query-preloader/__tests__/createQueryPreloader/deferGraphQL17Alpha9.test.tsx diff --git a/src/react/query-preloader/__tests__/createQueryPreloader/deferGraphQL17Alpha9.test.tsx b/src/react/query-preloader/__tests__/createQueryPreloader/deferGraphQL17Alpha9.test.tsx new file mode 100644 index 00000000000..c62bfeb7dc6 --- /dev/null +++ b/src/react/query-preloader/__tests__/createQueryPreloader/deferGraphQL17Alpha9.test.tsx @@ -0,0 +1,171 @@ +import { + createRenderStream, + disableActEnvironment, + useTrackRenders, +} from "@testing-library/react-render-stream"; +import React, { Suspense } from "react"; 
+import { ErrorBoundary } from "react-error-boundary"; + +import type { DataState } from "@apollo/client"; +import { ApolloClient, gql, NetworkStatus } from "@apollo/client"; +import { InMemoryCache } from "@apollo/client/cache"; +import { GraphQL17Alpha9Handler } from "@apollo/client/incremental"; +import type { QueryRef } from "@apollo/client/react"; +import { + ApolloProvider, + createQueryPreloader, + useReadQuery, +} from "@apollo/client/react"; +import { + markAsStreaming, + mockDeferStreamGraphQL17Alpha9, +} from "@apollo/client/testing/internal"; + +async function renderDefaultTestApp< + TData, + TStates extends DataState["dataState"] = "complete" | "streaming", +>({ + client, + queryRef, +}: { + client: ApolloClient; + queryRef: QueryRef; +}) { + const renderStream = createRenderStream({ + initialSnapshot: { + result: null as useReadQuery.Result | null, + error: null as Error | null, + }, + }); + + function ReadQueryHook() { + useTrackRenders({ name: "ReadQueryHook" }); + renderStream.mergeSnapshot({ result: useReadQuery(queryRef) }); + + return null; + } + + function SuspenseFallback() { + useTrackRenders({ name: "SuspenseFallback" }); + return

<p>Loading</p>
; + } + + function ErrorFallback({ error }: { error: Error }) { + useTrackRenders({ name: "ErrorFallback" }); + renderStream.mergeSnapshot({ error }); + + return null; + } + + function App() { + useTrackRenders({ name: "App" }); + + return ( + + }> + + + + ); + } + + const utils = await renderStream.render(, { + wrapper: ({ children }) => ( + {children} + ), + }); + + function rerender() { + return utils.rerender(); + } + + return { ...utils, rerender, renderStream }; +} + +test("suspends deferred queries until initial chunk loads then rerenders with deferred data", async () => { + const query = gql` + query { + greeting { + message + ... on Greeting @defer { + recipient { + name + } + } + } + } + `; + + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDeferStreamGraphQL17Alpha9(); + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: httpLink, + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + const preloadQuery = createQueryPreloader(client); + const queryRef = preloadQuery(query); + + using _disabledAct = disableActEnvironment(); + const { renderStream } = await renderDefaultTestApp({ client, queryRef }); + + { + const { renderedComponents } = await renderStream.takeRender(); + + expect(renderedComponents).toStrictEqual(["App", "SuspenseFallback"]); + } + + enqueueInitialChunk({ + data: { greeting: { message: "Hello world", __typename: "Greeting" } }, + pending: [{ id: "0", path: ["greeting"] }], + hasNext: true, + }); + + { + const { snapshot, renderedComponents } = await renderStream.takeRender(); + + expect(renderedComponents).toStrictEqual(["ReadQueryHook"]); + expect(snapshot.result).toStrictEqualTyped({ + data: markAsStreaming({ + greeting: { message: "Hello world", __typename: "Greeting" }, + }), + dataState: "streaming", + error: undefined, + networkStatus: NetworkStatus.streaming, + }); + } + + enqueueSubsequentChunk({ + incremental: [ + { + data: { + recipient: { name: "Alice", __typename: "Person" }, + __typename: "Greeting", + }, + id: "0", + }, + ], + completed: [{ id: "0" }], + hasNext: false, + }); + + { + const { snapshot, renderedComponents } = await renderStream.takeRender(); + + expect(renderedComponents).toStrictEqual(["ReadQueryHook"]); + expect(snapshot.result).toStrictEqualTyped({ + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Alice" }, + }, + }, + dataState: "complete", + error: undefined, + networkStatus: NetworkStatus.ready, + }); + } +}); \ No newline at end of file From c217efa8ea861caf5bf6f0e8a0af22d57025677c Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Fri, 5 Sep 2025 00:17:25 -0600 Subject: [PATCH 072/254] Add a doc block --- src/incremental/handlers/graphql17Alpha9.ts | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/incremental/handlers/graphql17Alpha9.ts b/src/incremental/handlers/graphql17Alpha9.ts index c8934c97fab..edad4b2834f 100644 --- a/src/incremental/handlers/graphql17Alpha9.ts +++ b/src/incremental/handlers/graphql17Alpha9.ts @@ -171,6 +171,10 @@ class IncrementalRequest } } +/** + * Provides handling for the incremental delivery specification implemented by + * graphql.js version `17.0.0-alpha.9`. 
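+ *
+ * @example
+ * A minimal configuration sketch (the `HttpLink` and its `uri` here are
+ * illustrative; any terminating link that delivers the incremental chunks
+ * works the same way):
+ *
+ * ```ts
+ * import { ApolloClient, HttpLink, InMemoryCache } from "@apollo/client";
+ * import { GraphQL17Alpha9Handler } from "@apollo/client/incremental";
+ *
+ * const client = new ApolloClient({
+ *   cache: new InMemoryCache(),
+ *   link: new HttpLink({ uri: "/graphql" }),
+ *   // opt in to the graphql-js 17.0.0-alpha.9 incremental delivery format
+ *   incrementalHandler: new GraphQL17Alpha9Handler(),
+ * });
+ * ```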
+ */ export class GraphQL17Alpha9Handler implements Incremental.Handler> { From 93ef59d683d8dd09fda9f4cc776e5fb95617bafa Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Fri, 5 Sep 2025 00:18:04 -0600 Subject: [PATCH 073/254] Mark methods as internal --- src/incremental/handlers/graphql17Alpha9.ts | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/incremental/handlers/graphql17Alpha9.ts b/src/incremental/handlers/graphql17Alpha9.ts index edad4b2834f..71bce701865 100644 --- a/src/incremental/handlers/graphql17Alpha9.ts +++ b/src/incremental/handlers/graphql17Alpha9.ts @@ -178,6 +178,7 @@ class IncrementalRequest export class GraphQL17Alpha9Handler implements Incremental.Handler> { + /** @internal */ isIncrementalResult( result: ApolloLink.Result ): result is @@ -186,6 +187,7 @@ export class GraphQL17Alpha9Handler return "hasNext" in result; } + /** @internal */ prepareRequest(request: ApolloLink.Request): ApolloLink.Request { if (hasDirectives(["defer"], request.query)) { const context = request.context ?? {}; @@ -198,6 +200,7 @@ export class GraphQL17Alpha9Handler return request; } + /** @internal */ extractErrors(result: ApolloLink.Result) { const acc: GraphQLFormattedError[] = []; const push = ({ @@ -221,6 +224,7 @@ export class GraphQL17Alpha9Handler } } + /** @internal */ startRequest(_: { query: DocumentNode }) { return new IncrementalRequest(); } From 102756eff21734c970700473ad4f34f3d17289d9 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Fri, 5 Sep 2025 00:18:36 -0600 Subject: [PATCH 074/254] Run extract api --- .api-reports/api-report-incremental.api.md | 104 +++++++++++++++++++++ 1 file changed, 104 insertions(+) diff --git a/.api-reports/api-report-incremental.api.md b/.api-reports/api-report-incremental.api.md index efdd684674c..f5ce7d7230e 100644 --- a/.api-reports/api-report-incremental.api.md +++ b/.api-reports/api-report-incremental.api.md @@ -80,6 +80,102 @@ class DeferRequest> implements Incremental hasNext: boolean; } +// @public (undocumented) +export namespace GraphQL17Alpha9Handler { + // (undocumented) + export type Chunk = InitialResult | SubsequentResult; + // (undocumented) + export interface CompletedResult { + // (undocumented) + errors?: ReadonlyArray; + // (undocumented) + id: string; + } + // (undocumented) + export interface GraphQL17Alpha9Result extends HKT { + // (undocumented) + arg1: unknown; + // (undocumented) + arg2: unknown; + // (undocumented) + return: GraphQL17Alpha9Handler.Chunk>; + } + // (undocumented) + export interface IncrementalDeferResult> { + // (undocumented) + data: TData; + // (undocumented) + errors?: ReadonlyArray; + // (undocumented) + extensions?: Record; + // (undocumented) + id: string; + // (undocumented) + subPath?: Incremental.Path; + } + // (undocumented) + export type IncrementalResult = IncrementalDeferResult | IncrementalStreamResult; + // (undocumented) + export interface IncrementalStreamResult> { + // (undocumented) + errors?: ReadonlyArray; + // (undocumented) + extensions?: Record; + // (undocumented) + id: string; + // (undocumented) + items: TData; + // (undocumented) + subPath?: Incremental.Path; + } + // (undocumented) + export type InitialResult> = { + data: TData; + errors?: ReadonlyArray; + pending: ReadonlyArray; + hasNext: boolean; + extensions?: Record; + }; + // (undocumented) + export interface PendingResult { + // (undocumented) + id: string; + // (undocumented) + label?: string; + // (undocumented) + path: Incremental.Path; + } + // (undocumented) + export type SubsequentResult = { + hasNext: 
boolean; + pending?: ReadonlyArray; + incremental?: ReadonlyArray>; + completed?: ReadonlyArray; + extensions?: Record; + }; + // (undocumented) + export interface TypeOverrides { + // (undocumented) + AdditionalApolloLinkResultTypes: GraphQL17Alpha9Result; + } +} + +// @public +export class GraphQL17Alpha9Handler implements Incremental.Handler> { + // @internal @deprecated (undocumented) + extractErrors(result: ApolloLink.Result): GraphQLFormattedError[] | undefined; + // @internal @deprecated (undocumented) + isIncrementalResult(result: ApolloLink.Result): result is GraphQL17Alpha9Handler.InitialResult | GraphQL17Alpha9Handler.SubsequentResult; + // @internal @deprecated (undocumented) + prepareRequest(request: ApolloLink.Request): ApolloLink.Request; + // Warning: (ae-forgotten-export) The symbol "IncrementalRequest" needs to be exported by the entry point index.d.ts + // + // @internal @deprecated (undocumented) + startRequest(_: { + query: DocumentNode; + }): IncrementalRequest; +} + // @public (undocumented) export namespace Incremental { // @internal @deprecated (undocumented) @@ -106,6 +202,14 @@ export namespace Incremental { export type Path = ReadonlyArray; } +// @public (undocumented) +class IncrementalRequest implements Incremental.IncrementalRequest, TData> { + // (undocumented) + handle(cacheData: TData | DeepPartial | null | undefined, chunk: GraphQL17Alpha9Handler.Chunk): FormattedExecutionResult; + // (undocumented) + hasNext: boolean; +} + // @public (undocumented) export namespace NotImplementedHandler { // (undocumented) From 16b44574ff8d261833e5a2444c8741469ec9d00b Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Fri, 5 Sep 2025 00:26:31 -0600 Subject: [PATCH 075/254] Update size limits --- .size-limits.json | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.size-limits.json b/.size-limits.json index 7f303c892bf..c2bb067567d 100644 --- a/.size-limits.json +++ b/.size-limits.json @@ -1,6 +1,6 @@ { - "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (CJS)": 43857, - "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production) (CJS)": 38699, - "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\"": 33415, - "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production)": 27498 + "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (CJS)": 44246, + "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production) (CJS)": 39057, + "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\"": 33470, + "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production)": 27490 } From 108e9f01fc88723f4cf6de83e5593b23297c116f Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Fri, 5 Sep 2025 11:19:03 -0600 Subject: [PATCH 076/254] Formatting --- .../__tests__/graphql17Alpha9/defer.test.ts | 20 ++++++++++++++----- .../__tests__/useBackgroundQuery.test.tsx | 2 -- .../deferGraphQL17Alpha9.test.tsx | 2 +- .../useMutation/defer20220824.test.tsx | 1 - .../__tests__/createQueryPreloader.test.tsx | 1 - .../defer20220824.test.tsx | 1 - .../deferGraphQL17Alpha9.test.tsx | 2 +- 7 files changed, 17 insertions(+), 12 deletions(-) diff --git a/src/incremental/handlers/__tests__/graphql17Alpha9/defer.test.ts b/src/incremental/handlers/__tests__/graphql17Alpha9/defer.test.ts index fdf145ed68e..a2aed33b372 100644 --- a/src/incremental/handlers/__tests__/graphql17Alpha9/defer.test.ts +++ 
b/src/incremental/handlers/__tests__/graphql17Alpha9/defer.test.ts @@ -794,7 +794,9 @@ describe("graphql-js test cases", () => { a { ... @defer { b { - c { d } + c { + d + } } } } @@ -804,7 +806,9 @@ describe("graphql-js test cases", () => { someField ... @defer { b { - e { f } + e { + f + } } } } @@ -891,7 +895,9 @@ describe("graphql-js test cases", () => { a { ... @defer { b { - c { d } + c { + d + } } } } @@ -900,8 +906,12 @@ describe("graphql-js test cases", () => { a { ... @defer { b { - c { d } - e { f } + c { + d + } + e { + f + } } } } diff --git a/src/react/hooks/__tests__/useBackgroundQuery.test.tsx b/src/react/hooks/__tests__/useBackgroundQuery.test.tsx index d1c0db9e893..6e4c07f6e7f 100644 --- a/src/react/hooks/__tests__/useBackgroundQuery.test.tsx +++ b/src/react/hooks/__tests__/useBackgroundQuery.test.tsx @@ -1390,7 +1390,6 @@ it("works with startTransition to change variables", async () => { } }); - it("reacts to cache updates", async () => { const { query, mocks } = setupSimpleCase(); @@ -3671,7 +3670,6 @@ it('suspends and does not use partial data when changing variables and using a " await expect(renderStream).not.toRerender({ timeout: 50 }); }); - it.each([ "cache-first", "network-only", diff --git a/src/react/hooks/__tests__/useLoadableQuery/deferGraphQL17Alpha9.test.tsx b/src/react/hooks/__tests__/useLoadableQuery/deferGraphQL17Alpha9.test.tsx index d43bdb16e73..c4fee82fef3 100644 --- a/src/react/hooks/__tests__/useLoadableQuery/deferGraphQL17Alpha9.test.tsx +++ b/src/react/hooks/__tests__/useLoadableQuery/deferGraphQL17Alpha9.test.tsx @@ -403,4 +403,4 @@ test('does not suspend deferred queries with partial data in the cache and using } await expect(takeRender).not.toRerender(); -}); \ No newline at end of file +}); diff --git a/src/react/hooks/__tests__/useMutation/defer20220824.test.tsx b/src/react/hooks/__tests__/useMutation/defer20220824.test.tsx index 42ebb4e9cde..5319ccdc587 100644 --- a/src/react/hooks/__tests__/useMutation/defer20220824.test.tsx +++ b/src/react/hooks/__tests__/useMutation/defer20220824.test.tsx @@ -387,4 +387,3 @@ test("calls the update function with the final merged result data", async () => expect(console.error).not.toHaveBeenCalled(); }); - diff --git a/src/react/query-preloader/__tests__/createQueryPreloader.test.tsx b/src/react/query-preloader/__tests__/createQueryPreloader.test.tsx index 96f4838e0d3..f09581c7dfd 100644 --- a/src/react/query-preloader/__tests__/createQueryPreloader.test.tsx +++ b/src/react/query-preloader/__tests__/createQueryPreloader.test.tsx @@ -1806,7 +1806,6 @@ test("does not suspend and returns partial data when `returnPartialData` is `tru } }); - test("masks result when dataMasking is `true`", async () => { const { query, mocks } = setupMaskedVariablesCase(); const client = new ApolloClient({ diff --git a/src/react/query-preloader/__tests__/createQueryPreloader/defer20220824.test.tsx b/src/react/query-preloader/__tests__/createQueryPreloader/defer20220824.test.tsx index 196afa27533..024033c91ff 100644 --- a/src/react/query-preloader/__tests__/createQueryPreloader/defer20220824.test.tsx +++ b/src/react/query-preloader/__tests__/createQueryPreloader/defer20220824.test.tsx @@ -167,4 +167,3 @@ test("suspends deferred queries until initial chunk loads then rerenders with de }); } }); - diff --git a/src/react/query-preloader/__tests__/createQueryPreloader/deferGraphQL17Alpha9.test.tsx b/src/react/query-preloader/__tests__/createQueryPreloader/deferGraphQL17Alpha9.test.tsx index c62bfeb7dc6..5917f770217 100644 --- 
a/src/react/query-preloader/__tests__/createQueryPreloader/deferGraphQL17Alpha9.test.tsx +++ b/src/react/query-preloader/__tests__/createQueryPreloader/deferGraphQL17Alpha9.test.tsx @@ -168,4 +168,4 @@ test("suspends deferred queries until initial chunk loads then rerenders with de networkStatus: NetworkStatus.ready, }); } -}); \ No newline at end of file +}); From bfacd3eb25b1d6d38f02cba899cea86017f1935d Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Fri, 5 Sep 2025 11:59:54 -0600 Subject: [PATCH 077/254] Fix lint errors --- .../handlers/__tests__/graphql17Alpha9/defer.test.ts | 2 +- .../query-preloader/__tests__/createQueryPreloader.test.tsx | 4 +--- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/src/incremental/handlers/__tests__/graphql17Alpha9/defer.test.ts b/src/incremental/handlers/__tests__/graphql17Alpha9/defer.test.ts index a2aed33b372..4462476513e 100644 --- a/src/incremental/handlers/__tests__/graphql17Alpha9/defer.test.ts +++ b/src/incremental/handlers/__tests__/graphql17Alpha9/defer.test.ts @@ -25,6 +25,7 @@ import { NetworkStatus, Observable, } from "@apollo/client"; +import { GraphQL17Alpha9Handler } from "@apollo/client/incremental"; import { markAsStreaming, mockDefer20220824, @@ -33,7 +34,6 @@ import { } from "@apollo/client/testing/internal"; import { - GraphQL17Alpha9Handler, hasIncrementalChunks, // eslint-disable-next-line local-rules/no-relative-imports } from "../../graphql17Alpha9.js"; diff --git a/src/react/query-preloader/__tests__/createQueryPreloader.test.tsx b/src/react/query-preloader/__tests__/createQueryPreloader.test.tsx index f09581c7dfd..023f3e9ffc3 100644 --- a/src/react/query-preloader/__tests__/createQueryPreloader.test.tsx +++ b/src/react/query-preloader/__tests__/createQueryPreloader.test.tsx @@ -25,7 +25,6 @@ import { InMemoryCache, NetworkStatus, } from "@apollo/client"; -import { Defer20220824Handler } from "@apollo/client/incremental"; import type { PreloadedQueryRef, QueryRef } from "@apollo/client/react"; import { ApolloProvider, @@ -33,7 +32,7 @@ import { useReadQuery, } from "@apollo/client/react"; import { unwrapQueryRef } from "@apollo/client/react/internal"; -import { MockLink, MockSubscriptionLink } from "@apollo/client/testing"; +import { MockLink } from "@apollo/client/testing"; import type { MaskedVariablesCaseData, SimpleCaseData, @@ -41,7 +40,6 @@ import type { } from "@apollo/client/testing/internal"; import { createClientWrapper, - markAsStreaming, renderHookAsync, setupMaskedVariablesCase, setupSimpleCase, From 5c51186bda6bff23bab5f2438a3ee9db1cf54390 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Fri, 5 Sep 2025 12:08:31 -0600 Subject: [PATCH 078/254] Remove unused imports --- src/react/hooks/__tests__/useQuery/defer20220824.test.tsx | 1 - src/react/hooks/__tests__/useQuery/deferGraphQL17Alpha2.test.tsx | 1 - 2 files changed, 2 deletions(-) diff --git a/src/react/hooks/__tests__/useQuery/defer20220824.test.tsx b/src/react/hooks/__tests__/useQuery/defer20220824.test.tsx index a43a83dc428..d15c2e78200 100644 --- a/src/react/hooks/__tests__/useQuery/defer20220824.test.tsx +++ b/src/react/hooks/__tests__/useQuery/defer20220824.test.tsx @@ -2,7 +2,6 @@ import { disableActEnvironment, renderHookToSnapshotStream, } from "@testing-library/react-render-stream"; -import React from "react"; import { ApolloClient, diff --git a/src/react/hooks/__tests__/useQuery/deferGraphQL17Alpha2.test.tsx b/src/react/hooks/__tests__/useQuery/deferGraphQL17Alpha2.test.tsx index 60db1cde900..0e967508933 100644 --- 
a/src/react/hooks/__tests__/useQuery/deferGraphQL17Alpha2.test.tsx +++ b/src/react/hooks/__tests__/useQuery/deferGraphQL17Alpha2.test.tsx @@ -2,7 +2,6 @@ import { disableActEnvironment, renderHookToSnapshotStream, } from "@testing-library/react-render-stream"; -import React from "react"; import { ApolloClient, From cb0e021f2f7d41b377f3f42c67520698fccb2b1b Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Fri, 5 Sep 2025 12:08:44 -0600 Subject: [PATCH 079/254] Fix useSuspenseQuery test that just emitted errors --- .../useSuspenseQuery/deferGraphQL17Alpha9.test.tsx | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/src/react/hooks/__tests__/useSuspenseQuery/deferGraphQL17Alpha9.test.tsx b/src/react/hooks/__tests__/useSuspenseQuery/deferGraphQL17Alpha9.test.tsx index 063aa94590c..51770ed06e9 100644 --- a/src/react/hooks/__tests__/useSuspenseQuery/deferGraphQL17Alpha9.test.tsx +++ b/src/react/hooks/__tests__/useSuspenseQuery/deferGraphQL17Alpha9.test.tsx @@ -1717,11 +1717,14 @@ test("throws graphql errors returned by deferred queries", async () => { } `; - const { httpLink, enqueueInitialChunk } = mockDeferStreamGraphQL17Alpha9(); - const client = new ApolloClient({ cache: new InMemoryCache(), - link: httpLink, + link: new ApolloLink(() => { + return of({ + data: null, + errors: [{ message: "Could not fetch greeting" }], + }).pipe(delay(20)); + }), incrementalHandler: new GraphQL17Alpha9Handler(), }); @@ -1739,11 +1742,6 @@ test("throws graphql errors returned by deferred queries", async () => { expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); } - enqueueInitialChunk({ - errors: [{ message: "Could not fetch greeting" }], - hasNext: false, - }); - { const { snapshot, renderedComponents } = await takeRender(); From 8a8927ffc4b66dc578cc2859e5d331d306d8b497 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Fri, 5 Sep 2025 12:22:35 -0600 Subject: [PATCH 080/254] Move `@defer` tests from ApolloClient/* to client.watchQuery/defer20220824 --- .../__tests__/ApolloClient/general.test.ts | 156 ----- .../ApolloClient/multiple-results.test.ts | 400 ------------- .../client.watchQuery/defer20220824.test.ts | 558 ++++++++++++++++++ 3 files changed, 558 insertions(+), 556 deletions(-) delete mode 100644 src/core/__tests__/ApolloClient/multiple-results.test.ts create mode 100644 src/core/__tests__/client.watchQuery/defer20220824.test.ts diff --git a/src/core/__tests__/ApolloClient/general.test.ts b/src/core/__tests__/ApolloClient/general.test.ts index 49ecefaa437..588141c8e8a 100644 --- a/src/core/__tests__/ApolloClient/general.test.ts +++ b/src/core/__tests__/ApolloClient/general.test.ts @@ -9,12 +9,10 @@ import type { ObservableQuery, TypedDocumentNode } from "@apollo/client"; import { ApolloClient, NetworkStatus } from "@apollo/client"; import { InMemoryCache } from "@apollo/client/cache"; import { CombinedGraphQLErrors } from "@apollo/client/errors"; -import { Defer20220824Handler } from "@apollo/client/incremental"; import { ApolloLink } from "@apollo/client/link"; import { ClientAwarenessLink } from "@apollo/client/link/client-awareness"; import { MockLink } from "@apollo/client/testing"; import { - mockDefer20220824, ObservableStream, spyOnConsole, wait, @@ -7548,160 +7546,6 @@ describe("ApolloClient", () => { ) ).toBeUndefined(); }); - - it("deduplicates queries as long as a query still has deferred chunks", async () => { - const query = gql` - query LazyLoadLuke { - people(id: 1) { - id - name - friends { - id - ... 
@defer { - name - } - } - } - } - `; - - const outgoingRequestSpy = jest.fn(((operation, forward) => - forward(operation)) satisfies ApolloLink.RequestHandler); - const defer = mockDefer20220824(); - const client = new ApolloClient({ - cache: new InMemoryCache({}), - link: new ApolloLink(outgoingRequestSpy).concat(defer.httpLink), - incrementalHandler: new Defer20220824Handler(), - }); - - const query1 = new ObservableStream( - client.watchQuery({ query, fetchPolicy: "network-only" }) - ); - const query2 = new ObservableStream( - client.watchQuery({ query, fetchPolicy: "network-only" }) - ); - expect(outgoingRequestSpy).toHaveBeenCalledTimes(1); - - const initialData = { - people: { - __typename: "Person", - id: 1, - name: "Luke", - friends: [ - { - __typename: "Person", - id: 5, - } as { __typename: "Person"; id: number; name?: string }, - { - __typename: "Person", - id: 8, - } as { __typename: "Person"; id: number; name?: string }, - ], - }, - }; - const initialResult: ObservableQuery.Result = { - data: initialData, - dataState: "streaming", - loading: true, - networkStatus: NetworkStatus.streaming, - partial: true, - }; - - defer.enqueueInitialChunk({ - data: initialData, - hasNext: true, - }); - - await expect(query1).toEmitTypedValue({ - data: undefined, - dataState: "empty", - loading: true, - networkStatus: NetworkStatus.loading, - partial: true, - }); - await expect(query2).toEmitTypedValue({ - data: undefined, - dataState: "empty", - loading: true, - networkStatus: NetworkStatus.loading, - partial: true, - }); - - await expect(query1).toEmitTypedValue(initialResult); - await expect(query2).toEmitTypedValue(initialResult); - - const query3 = new ObservableStream( - client.watchQuery({ query, fetchPolicy: "network-only" }) - ); - await expect(query3).toEmitTypedValue(initialResult); - expect(outgoingRequestSpy).toHaveBeenCalledTimes(1); - - const firstChunk = { - incremental: [ - { - data: { - name: "Leia", - }, - path: ["people", "friends", 0], - }, - ], - hasNext: true, - }; - const resultAfterFirstChunk = structuredClone( - initialResult - ) as ObservableQuery.Result; - resultAfterFirstChunk.data.people.friends[0].name = "Leia"; - - defer.enqueueSubsequentChunk(firstChunk); - - await expect(query1).toEmitTypedValue(resultAfterFirstChunk); - await expect(query2).toEmitTypedValue(resultAfterFirstChunk); - await expect(query3).toEmitTypedValue(resultAfterFirstChunk); - - const query4 = new ObservableStream( - client.watchQuery({ query, fetchPolicy: "network-only" }) - ); - await expect(query4).toEmitTypedValue(resultAfterFirstChunk); - expect(outgoingRequestSpy).toHaveBeenCalledTimes(1); - - const secondChunk = { - incremental: [ - { - data: { - name: "Han Solo", - }, - path: ["people", "friends", 1], - }, - ], - hasNext: false, - }; - const resultAfterSecondChunk = { - ...structuredClone(resultAfterFirstChunk), - loading: false, - networkStatus: NetworkStatus.ready, - dataState: "complete", - partial: false, - } as ObservableQuery.Result; - resultAfterSecondChunk.data.people.friends[1].name = "Han Solo"; - - defer.enqueueSubsequentChunk(secondChunk); - - await expect(query1).toEmitTypedValue(resultAfterSecondChunk); - await expect(query2).toEmitTypedValue(resultAfterSecondChunk); - await expect(query3).toEmitTypedValue(resultAfterSecondChunk); - await expect(query4).toEmitTypedValue(resultAfterSecondChunk); - - // TODO: Re-enable once below condition can be met - /* const query5 = */ new ObservableStream( - client.watchQuery({ query, fetchPolicy: "network-only" }) - ); - // TODO: 
Re-enable once notifyOnNetworkStatusChange controls whether we - // get the loading state. This test fails with the switch to RxJS for now - // since the initial value is emitted synchronously unlike zen-observable - // where the emitted result wasn't emitted until after this assertion. - // expect(query5).not.toEmitAnything(); - expect(outgoingRequestSpy).toHaveBeenCalledTimes(2); - }); }); describe("missing cache field warnings", () => { diff --git a/src/core/__tests__/ApolloClient/multiple-results.test.ts b/src/core/__tests__/ApolloClient/multiple-results.test.ts deleted file mode 100644 index 466e02c920e..00000000000 --- a/src/core/__tests__/ApolloClient/multiple-results.test.ts +++ /dev/null @@ -1,400 +0,0 @@ -import { GraphQLError } from "graphql"; -import { gql } from "graphql-tag"; - -import { ApolloClient, NetworkStatus } from "@apollo/client"; -import { InMemoryCache } from "@apollo/client/cache"; -import { Defer20220824Handler } from "@apollo/client/incremental"; -import { MockSubscriptionLink } from "@apollo/client/testing"; -import { ObservableStream, wait } from "@apollo/client/testing/internal"; - -describe("mutiple results", () => { - it("allows multiple query results from link", async () => { - const query = gql` - query LazyLoadLuke { - people_one(id: 1) { - name - friends @defer { - name - } - } - } - `; - - const initialData = { - people_one: { - name: "Luke Skywalker", - friends: null, - }, - }; - - const laterData = { - people_one: { - // XXX true defer's wouldn't send this - name: "Luke Skywalker", - friends: [{ name: "Leia Skywalker" }], - }, - }; - const link = new MockSubscriptionLink(); - const client = new ApolloClient({ - cache: new InMemoryCache(), - link, - incrementalHandler: new Defer20220824Handler(), - }); - - const observable = client.watchQuery({ - query, - variables: {}, - }); - const stream = new ObservableStream(observable); - - await expect(stream).toEmitTypedValue({ - data: undefined, - dataState: "empty", - loading: true, - networkStatus: NetworkStatus.loading, - partial: true, - }); - - // fire off first result - link.simulateResult({ result: { data: initialData } }); - - await expect(stream).toEmitTypedValue({ - data: initialData, - dataState: "complete", - loading: false, - networkStatus: 7, - partial: false, - }); - - link.simulateResult({ result: { data: laterData } }); - - await expect(stream).toEmitTypedValue({ - data: laterData, - dataState: "complete", - loading: false, - networkStatus: 7, - partial: false, - }); - }); - - it("allows multiple query results from link with ignored errors", async () => { - const query = gql` - query LazyLoadLuke { - people_one(id: 1) { - name - friends @defer { - name - } - } - } - `; - - const initialData = { - people_one: { - name: "Luke Skywalker", - friends: null, - }, - }; - - const laterData = { - people_one: { - // XXX true defer's wouldn't send this - name: "Luke Skywalker", - friends: [{ name: "Leia Skywalker" }], - }, - }; - const link = new MockSubscriptionLink(); - const client = new ApolloClient({ - cache: new InMemoryCache(), - link, - incrementalHandler: new Defer20220824Handler(), - }); - - const observable = client.watchQuery({ - query, - variables: {}, - errorPolicy: "ignore", - }); - const stream = new ObservableStream(observable); - - await expect(stream).toEmitTypedValue({ - data: undefined, - dataState: "empty", - loading: true, - networkStatus: NetworkStatus.loading, - partial: true, - }); - - // fire off first result - link.simulateResult({ result: { data: initialData } }); - - await 
expect(stream).toEmitTypedValue({ - data: initialData, - dataState: "complete", - loading: false, - networkStatus: 7, - partial: false, - }); - - link.simulateResult({ - result: { errors: [new GraphQLError("defer failed")] }, - }); - - await expect(stream).toEmitTypedValue({ - data: undefined, - dataState: "empty", - loading: false, - networkStatus: 7, - partial: true, - }); - - await wait(20); - link.simulateResult({ result: { data: laterData } }); - - await expect(stream).toEmitTypedValue({ - data: laterData, - dataState: "complete", - loading: false, - networkStatus: 7, - partial: false, - }); - }); - - it("strips errors from a result if ignored", async () => { - const query = gql` - query LazyLoadLuke { - people_one(id: 1) { - name - friends @defer { - name - } - } - } - `; - - const initialData = { - people_one: { - name: "Luke Skywalker", - friends: null, - }, - }; - - const laterData = { - people_one: { - // XXX true defer's wouldn't send this - name: "Luke Skywalker", - friends: [{ name: "Leia Skywalker" }], - }, - }; - const link = new MockSubscriptionLink(); - const client = new ApolloClient({ - cache: new InMemoryCache(), - link, - incrementalHandler: new Defer20220824Handler(), - }); - - const observable = client.watchQuery({ - query, - variables: {}, - errorPolicy: "ignore", - }); - const stream = new ObservableStream(observable); - - await expect(stream).toEmitTypedValue({ - data: undefined, - dataState: "empty", - loading: true, - networkStatus: NetworkStatus.loading, - partial: true, - }); - - // fire off first result - link.simulateResult({ result: { data: initialData } }); - - await expect(stream).toEmitTypedValue({ - data: initialData, - dataState: "complete", - loading: false, - networkStatus: 7, - partial: false, - }); - - // this should fire the `next` event without this error - link.simulateResult({ - result: { - errors: [new GraphQLError("defer failed")], - data: laterData, - }, - }); - - await expect(stream).toEmitTypedValue({ - data: laterData, - dataState: "complete", - loading: false, - networkStatus: 7, - partial: false, - }); - }); - - it.skip("allows multiple query results from link with all errors", async () => { - const query = gql` - query LazyLoadLuke { - people_one(id: 1) { - name - friends @defer { - name - } - } - } - `; - - const initialData = { - people_one: { - name: "Luke Skywalker", - friends: null, - }, - }; - - const laterData = { - people_one: { - // XXX true defer's wouldn't send this - name: "Luke Skywalker", - friends: [{ name: "Leia Skywalker" }], - }, - }; - const link = new MockSubscriptionLink(); - const client = new ApolloClient({ - cache: new InMemoryCache(), - link, - incrementalHandler: new Defer20220824Handler(), - }); - - const observable = client.watchQuery({ - query, - variables: {}, - errorPolicy: "all", - }); - const stream = new ObservableStream(observable); - - // fire off first result - link.simulateResult({ result: { data: initialData } }); - - await expect(stream).toEmitTypedValue({ - data: initialData, - dataState: "complete", - loading: false, - networkStatus: 7, - partial: false, - }); - - // this should fire the next event again - link.simulateResult({ - error: new Error("defer failed"), - }); - - await expect(stream).toEmitTypedValue({ - data: initialData, - dataState: "complete", - loading: false, - networkStatus: 7, - error: new Error("defer failed"), - partial: false, - }); - - link.simulateResult({ result: { data: laterData } }); - - await expect(stream).toEmitTypedValue({ - data: laterData, - dataState: 
"complete", - loading: false, - networkStatus: 7, - partial: false, - }); - }); - - it("emits error if an error is set with the none policy", async () => { - const query = gql` - query LazyLoadLuke { - people_one(id: 1) { - name - friends @defer { - name - } - } - } - `; - - const initialData = { - people_one: { - name: "Luke Skywalker", - friends: null, - }, - }; - - const link = new MockSubscriptionLink(); - const client = new ApolloClient({ - cache: new InMemoryCache(), - link, - incrementalHandler: new Defer20220824Handler(), - }); - - const observable = client.watchQuery({ - query, - variables: {}, - // errorPolicy: 'none', // this is the default - }); - const stream = new ObservableStream(observable); - - let count = 0; - observable.subscribe({ - next: (result) => { - // errors should never be passed since they are ignored - count++; - // loading - if (count === 1) { - expect(result.error).toBeUndefined(); - } - // first result - if (count === 2) { - expect(result.error).toBeUndefined(); - } - // error - if (count === 3) { - expect(result.error).toBeDefined(); - } - }, - }); - - await expect(stream).toEmitTypedValue({ - data: undefined, - dataState: "empty", - loading: true, - networkStatus: NetworkStatus.loading, - partial: true, - }); - - // fire off first result - link.simulateResult({ result: { data: initialData } }); - - await expect(stream).toEmitTypedValue({ - data: initialData, - dataState: "complete", - loading: false, - networkStatus: 7, - partial: false, - }); - - link.simulateResult({ error: new Error("defer failed") }); - - await expect(stream).toEmitTypedValue({ - data: initialData, - dataState: "complete", - error: new Error("defer failed"), - loading: false, - networkStatus: NetworkStatus.error, - partial: false, - }); - - await expect(stream).not.toEmitAnything(); - }); -}); diff --git a/src/core/__tests__/client.watchQuery/defer20220824.test.ts b/src/core/__tests__/client.watchQuery/defer20220824.test.ts new file mode 100644 index 00000000000..c142553980f --- /dev/null +++ b/src/core/__tests__/client.watchQuery/defer20220824.test.ts @@ -0,0 +1,558 @@ +import { GraphQLError } from "graphql"; +import { gql } from "graphql-tag"; + +import type { ObservableQuery } from "@apollo/client"; +import { ApolloClient, NetworkStatus } from "@apollo/client"; +import { InMemoryCache } from "@apollo/client/cache"; +import { Defer20220824Handler } from "@apollo/client/incremental"; +import { ApolloLink } from "@apollo/client/link"; +import { MockSubscriptionLink } from "@apollo/client/testing"; +import { + mockDefer20220824, + ObservableStream, + wait, +} from "@apollo/client/testing/internal"; + +test("allows multiple query results from link", async () => { + const query = gql` + query LazyLoadLuke { + people_one(id: 1) { + name + friends @defer { + name + } + } + } + `; + + const initialData = { + people_one: { + name: "Luke Skywalker", + friends: null, + }, + }; + + const laterData = { + people_one: { + // XXX true defer's wouldn't send this + name: "Luke Skywalker", + friends: [{ name: "Leia Skywalker" }], + }, + }; + const link = new MockSubscriptionLink(); + const client = new ApolloClient({ + cache: new InMemoryCache(), + link, + incrementalHandler: new Defer20220824Handler(), + }); + + const observable = client.watchQuery({ + query, + variables: {}, + }); + const stream = new ObservableStream(observable); + + await expect(stream).toEmitTypedValue({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + partial: true, + }); + + // 
fire off first result + link.simulateResult({ result: { data: initialData } }); + + await expect(stream).toEmitTypedValue({ + data: initialData, + dataState: "complete", + loading: false, + networkStatus: 7, + partial: false, + }); + + link.simulateResult({ result: { data: laterData } }); + + await expect(stream).toEmitTypedValue({ + data: laterData, + dataState: "complete", + loading: false, + networkStatus: 7, + partial: false, + }); +}); + +test("allows multiple query results from link with ignored errors", async () => { + const query = gql` + query LazyLoadLuke { + people_one(id: 1) { + name + friends @defer { + name + } + } + } + `; + + const initialData = { + people_one: { + name: "Luke Skywalker", + friends: null, + }, + }; + + const laterData = { + people_one: { + // XXX true defer's wouldn't send this + name: "Luke Skywalker", + friends: [{ name: "Leia Skywalker" }], + }, + }; + const link = new MockSubscriptionLink(); + const client = new ApolloClient({ + cache: new InMemoryCache(), + link, + incrementalHandler: new Defer20220824Handler(), + }); + + const observable = client.watchQuery({ + query, + variables: {}, + errorPolicy: "ignore", + }); + const stream = new ObservableStream(observable); + + await expect(stream).toEmitTypedValue({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + partial: true, + }); + + // fire off first result + link.simulateResult({ result: { data: initialData } }); + + await expect(stream).toEmitTypedValue({ + data: initialData, + dataState: "complete", + loading: false, + networkStatus: 7, + partial: false, + }); + + link.simulateResult({ + result: { errors: [new GraphQLError("defer failed")] }, + }); + + await expect(stream).toEmitTypedValue({ + data: undefined, + dataState: "empty", + loading: false, + networkStatus: 7, + partial: true, + }); + + await wait(20); + link.simulateResult({ result: { data: laterData } }); + + await expect(stream).toEmitTypedValue({ + data: laterData, + dataState: "complete", + loading: false, + networkStatus: 7, + partial: false, + }); +}); + +test("strips errors from a result if ignored", async () => { + const query = gql` + query LazyLoadLuke { + people_one(id: 1) { + name + friends @defer { + name + } + } + } + `; + + const initialData = { + people_one: { + name: "Luke Skywalker", + friends: null, + }, + }; + + const laterData = { + people_one: { + // XXX true defer's wouldn't send this + name: "Luke Skywalker", + friends: [{ name: "Leia Skywalker" }], + }, + }; + const link = new MockSubscriptionLink(); + const client = new ApolloClient({ + cache: new InMemoryCache(), + link, + incrementalHandler: new Defer20220824Handler(), + }); + + const observable = client.watchQuery({ + query, + variables: {}, + errorPolicy: "ignore", + }); + const stream = new ObservableStream(observable); + + await expect(stream).toEmitTypedValue({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + partial: true, + }); + + // fire off first result + link.simulateResult({ result: { data: initialData } }); + + await expect(stream).toEmitTypedValue({ + data: initialData, + dataState: "complete", + loading: false, + networkStatus: 7, + partial: false, + }); + + // this should fire the `next` event without this error + link.simulateResult({ + result: { + errors: [new GraphQLError("defer failed")], + data: laterData, + }, + }); + + await expect(stream).toEmitTypedValue({ + data: laterData, + dataState: "complete", + loading: false, + networkStatus: 7, + 
partial: false, + }); +}); + +test.skip("allows multiple query results from link with all errors", async () => { + const query = gql` + query LazyLoadLuke { + people_one(id: 1) { + name + friends @defer { + name + } + } + } + `; + + const initialData = { + people_one: { + name: "Luke Skywalker", + friends: null, + }, + }; + + const laterData = { + people_one: { + // XXX true defer's wouldn't send this + name: "Luke Skywalker", + friends: [{ name: "Leia Skywalker" }], + }, + }; + const link = new MockSubscriptionLink(); + const client = new ApolloClient({ + cache: new InMemoryCache(), + link, + incrementalHandler: new Defer20220824Handler(), + }); + + const observable = client.watchQuery({ + query, + variables: {}, + errorPolicy: "all", + }); + const stream = new ObservableStream(observable); + + // fire off first result + link.simulateResult({ result: { data: initialData } }); + + await expect(stream).toEmitTypedValue({ + data: initialData, + dataState: "complete", + loading: false, + networkStatus: 7, + partial: false, + }); + + // this should fire the next event again + link.simulateResult({ + error: new Error("defer failed"), + }); + + await expect(stream).toEmitTypedValue({ + data: initialData, + dataState: "complete", + loading: false, + networkStatus: 7, + error: new Error("defer failed"), + partial: false, + }); + + link.simulateResult({ result: { data: laterData } }); + + await expect(stream).toEmitTypedValue({ + data: laterData, + dataState: "complete", + loading: false, + networkStatus: 7, + partial: false, + }); +}); + +test("emits error if an error is set with the none policy", async () => { + const query = gql` + query LazyLoadLuke { + people_one(id: 1) { + name + friends @defer { + name + } + } + } + `; + + const initialData = { + people_one: { + name: "Luke Skywalker", + friends: null, + }, + }; + + const link = new MockSubscriptionLink(); + const client = new ApolloClient({ + cache: new InMemoryCache(), + link, + incrementalHandler: new Defer20220824Handler(), + }); + + const observable = client.watchQuery({ + query, + variables: {}, + // errorPolicy: 'none', // this is the default + }); + const stream = new ObservableStream(observable); + + let count = 0; + observable.subscribe({ + next: (result) => { + // errors should never be passed since they are ignored + count++; + // loading + if (count === 1) { + expect(result.error).toBeUndefined(); + } + // first result + if (count === 2) { + expect(result.error).toBeUndefined(); + } + // error + if (count === 3) { + expect(result.error).toBeDefined(); + } + }, + }); + + await expect(stream).toEmitTypedValue({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + partial: true, + }); + + // fire off first result + link.simulateResult({ result: { data: initialData } }); + + await expect(stream).toEmitTypedValue({ + data: initialData, + dataState: "complete", + loading: false, + networkStatus: 7, + partial: false, + }); + + link.simulateResult({ error: new Error("defer failed") }); + + await expect(stream).toEmitTypedValue({ + data: initialData, + dataState: "complete", + error: new Error("defer failed"), + loading: false, + networkStatus: NetworkStatus.error, + partial: false, + }); + + await expect(stream).not.toEmitAnything(); +}); + +test("deduplicates queries as long as a query still has deferred chunks", async () => { + const query = gql` + query LazyLoadLuke { + people(id: 1) { + id + name + friends { + id + ... 
@defer { + name + } + } + } + } + `; + + const outgoingRequestSpy = jest.fn(((operation, forward) => + forward(operation)) satisfies ApolloLink.RequestHandler); + const defer = mockDefer20220824(); + const client = new ApolloClient({ + cache: new InMemoryCache({}), + link: new ApolloLink(outgoingRequestSpy).concat(defer.httpLink), + incrementalHandler: new Defer20220824Handler(), + }); + + const query1 = new ObservableStream( + client.watchQuery({ query, fetchPolicy: "network-only" }) + ); + const query2 = new ObservableStream( + client.watchQuery({ query, fetchPolicy: "network-only" }) + ); + expect(outgoingRequestSpy).toHaveBeenCalledTimes(1); + + const initialData = { + people: { + __typename: "Person", + id: 1, + name: "Luke", + friends: [ + { + __typename: "Person", + id: 5, + } as { __typename: "Person"; id: number; name?: string }, + { + __typename: "Person", + id: 8, + } as { __typename: "Person"; id: number; name?: string }, + ], + }, + }; + const initialResult: ObservableQuery.Result = { + data: initialData, + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + partial: true, + }; + + defer.enqueueInitialChunk({ + data: initialData, + hasNext: true, + }); + + await expect(query1).toEmitTypedValue({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + partial: true, + }); + await expect(query2).toEmitTypedValue({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + partial: true, + }); + + await expect(query1).toEmitTypedValue(initialResult); + await expect(query2).toEmitTypedValue(initialResult); + + const query3 = new ObservableStream( + client.watchQuery({ query, fetchPolicy: "network-only" }) + ); + await expect(query3).toEmitTypedValue(initialResult); + expect(outgoingRequestSpy).toHaveBeenCalledTimes(1); + + const firstChunk = { + incremental: [ + { + data: { + name: "Leia", + }, + path: ["people", "friends", 0], + }, + ], + hasNext: true, + }; + const resultAfterFirstChunk = structuredClone( + initialResult + ) as ObservableQuery.Result; + resultAfterFirstChunk.data.people.friends[0].name = "Leia"; + + defer.enqueueSubsequentChunk(firstChunk); + + await expect(query1).toEmitTypedValue(resultAfterFirstChunk); + await expect(query2).toEmitTypedValue(resultAfterFirstChunk); + await expect(query3).toEmitTypedValue(resultAfterFirstChunk); + + const query4 = new ObservableStream( + client.watchQuery({ query, fetchPolicy: "network-only" }) + ); + await expect(query4).toEmitTypedValue(resultAfterFirstChunk); + expect(outgoingRequestSpy).toHaveBeenCalledTimes(1); + + const secondChunk = { + incremental: [ + { + data: { + name: "Han Solo", + }, + path: ["people", "friends", 1], + }, + ], + hasNext: false, + }; + const resultAfterSecondChunk = { + ...structuredClone(resultAfterFirstChunk), + loading: false, + networkStatus: NetworkStatus.ready, + dataState: "complete", + partial: false, + } as ObservableQuery.Result; + resultAfterSecondChunk.data.people.friends[1].name = "Han Solo"; + + defer.enqueueSubsequentChunk(secondChunk); + + await expect(query1).toEmitTypedValue(resultAfterSecondChunk); + await expect(query2).toEmitTypedValue(resultAfterSecondChunk); + await expect(query3).toEmitTypedValue(resultAfterSecondChunk); + await expect(query4).toEmitTypedValue(resultAfterSecondChunk); + + // TODO: Re-enable once below condition can be met + /* const query5 = */ new ObservableStream( + client.watchQuery({ query, fetchPolicy: "network-only" }) + ); + // TODO: 
Re-enable once notifyOnNetworkStatusChange controls whether we + // get the loading state. This test fails with the switch to RxJS for now + // since the initial value is emitted synchronously unlike zen-observable + // where the emitted result wasn't emitted until after this assertion. + // expect(query5).not.toEmitAnything(); + expect(outgoingRequestSpy).toHaveBeenCalledTimes(2); +}); From a8231c0c072dd805f34c3589349d1ded4d9e05cc Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Fri, 5 Sep 2025 18:30:20 -0600 Subject: [PATCH 081/254] Restore multiple-results test --- .../ApolloClient/multiple-results.test.ts | 400 ++++++++++++++++++ 1 file changed, 400 insertions(+) create mode 100644 src/core/__tests__/ApolloClient/multiple-results.test.ts diff --git a/src/core/__tests__/ApolloClient/multiple-results.test.ts b/src/core/__tests__/ApolloClient/multiple-results.test.ts new file mode 100644 index 00000000000..466e02c920e --- /dev/null +++ b/src/core/__tests__/ApolloClient/multiple-results.test.ts @@ -0,0 +1,400 @@ +import { GraphQLError } from "graphql"; +import { gql } from "graphql-tag"; + +import { ApolloClient, NetworkStatus } from "@apollo/client"; +import { InMemoryCache } from "@apollo/client/cache"; +import { Defer20220824Handler } from "@apollo/client/incremental"; +import { MockSubscriptionLink } from "@apollo/client/testing"; +import { ObservableStream, wait } from "@apollo/client/testing/internal"; + +describe("mutiple results", () => { + it("allows multiple query results from link", async () => { + const query = gql` + query LazyLoadLuke { + people_one(id: 1) { + name + friends @defer { + name + } + } + } + `; + + const initialData = { + people_one: { + name: "Luke Skywalker", + friends: null, + }, + }; + + const laterData = { + people_one: { + // XXX true defer's wouldn't send this + name: "Luke Skywalker", + friends: [{ name: "Leia Skywalker" }], + }, + }; + const link = new MockSubscriptionLink(); + const client = new ApolloClient({ + cache: new InMemoryCache(), + link, + incrementalHandler: new Defer20220824Handler(), + }); + + const observable = client.watchQuery({ + query, + variables: {}, + }); + const stream = new ObservableStream(observable); + + await expect(stream).toEmitTypedValue({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + partial: true, + }); + + // fire off first result + link.simulateResult({ result: { data: initialData } }); + + await expect(stream).toEmitTypedValue({ + data: initialData, + dataState: "complete", + loading: false, + networkStatus: 7, + partial: false, + }); + + link.simulateResult({ result: { data: laterData } }); + + await expect(stream).toEmitTypedValue({ + data: laterData, + dataState: "complete", + loading: false, + networkStatus: 7, + partial: false, + }); + }); + + it("allows multiple query results from link with ignored errors", async () => { + const query = gql` + query LazyLoadLuke { + people_one(id: 1) { + name + friends @defer { + name + } + } + } + `; + + const initialData = { + people_one: { + name: "Luke Skywalker", + friends: null, + }, + }; + + const laterData = { + people_one: { + // XXX true defer's wouldn't send this + name: "Luke Skywalker", + friends: [{ name: "Leia Skywalker" }], + }, + }; + const link = new MockSubscriptionLink(); + const client = new ApolloClient({ + cache: new InMemoryCache(), + link, + incrementalHandler: new Defer20220824Handler(), + }); + + const observable = client.watchQuery({ + query, + variables: {}, + errorPolicy: "ignore", + }); + const 
stream = new ObservableStream(observable); + + await expect(stream).toEmitTypedValue({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + partial: true, + }); + + // fire off first result + link.simulateResult({ result: { data: initialData } }); + + await expect(stream).toEmitTypedValue({ + data: initialData, + dataState: "complete", + loading: false, + networkStatus: 7, + partial: false, + }); + + link.simulateResult({ + result: { errors: [new GraphQLError("defer failed")] }, + }); + + await expect(stream).toEmitTypedValue({ + data: undefined, + dataState: "empty", + loading: false, + networkStatus: 7, + partial: true, + }); + + await wait(20); + link.simulateResult({ result: { data: laterData } }); + + await expect(stream).toEmitTypedValue({ + data: laterData, + dataState: "complete", + loading: false, + networkStatus: 7, + partial: false, + }); + }); + + it("strips errors from a result if ignored", async () => { + const query = gql` + query LazyLoadLuke { + people_one(id: 1) { + name + friends @defer { + name + } + } + } + `; + + const initialData = { + people_one: { + name: "Luke Skywalker", + friends: null, + }, + }; + + const laterData = { + people_one: { + // XXX true defer's wouldn't send this + name: "Luke Skywalker", + friends: [{ name: "Leia Skywalker" }], + }, + }; + const link = new MockSubscriptionLink(); + const client = new ApolloClient({ + cache: new InMemoryCache(), + link, + incrementalHandler: new Defer20220824Handler(), + }); + + const observable = client.watchQuery({ + query, + variables: {}, + errorPolicy: "ignore", + }); + const stream = new ObservableStream(observable); + + await expect(stream).toEmitTypedValue({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + partial: true, + }); + + // fire off first result + link.simulateResult({ result: { data: initialData } }); + + await expect(stream).toEmitTypedValue({ + data: initialData, + dataState: "complete", + loading: false, + networkStatus: 7, + partial: false, + }); + + // this should fire the `next` event without this error + link.simulateResult({ + result: { + errors: [new GraphQLError("defer failed")], + data: laterData, + }, + }); + + await expect(stream).toEmitTypedValue({ + data: laterData, + dataState: "complete", + loading: false, + networkStatus: 7, + partial: false, + }); + }); + + it.skip("allows multiple query results from link with all errors", async () => { + const query = gql` + query LazyLoadLuke { + people_one(id: 1) { + name + friends @defer { + name + } + } + } + `; + + const initialData = { + people_one: { + name: "Luke Skywalker", + friends: null, + }, + }; + + const laterData = { + people_one: { + // XXX true defer's wouldn't send this + name: "Luke Skywalker", + friends: [{ name: "Leia Skywalker" }], + }, + }; + const link = new MockSubscriptionLink(); + const client = new ApolloClient({ + cache: new InMemoryCache(), + link, + incrementalHandler: new Defer20220824Handler(), + }); + + const observable = client.watchQuery({ + query, + variables: {}, + errorPolicy: "all", + }); + const stream = new ObservableStream(observable); + + // fire off first result + link.simulateResult({ result: { data: initialData } }); + + await expect(stream).toEmitTypedValue({ + data: initialData, + dataState: "complete", + loading: false, + networkStatus: 7, + partial: false, + }); + + // this should fire the next event again + link.simulateResult({ + error: new Error("defer failed"), + }); + + await 
expect(stream).toEmitTypedValue({ + data: initialData, + dataState: "complete", + loading: false, + networkStatus: 7, + error: new Error("defer failed"), + partial: false, + }); + + link.simulateResult({ result: { data: laterData } }); + + await expect(stream).toEmitTypedValue({ + data: laterData, + dataState: "complete", + loading: false, + networkStatus: 7, + partial: false, + }); + }); + + it("emits error if an error is set with the none policy", async () => { + const query = gql` + query LazyLoadLuke { + people_one(id: 1) { + name + friends @defer { + name + } + } + } + `; + + const initialData = { + people_one: { + name: "Luke Skywalker", + friends: null, + }, + }; + + const link = new MockSubscriptionLink(); + const client = new ApolloClient({ + cache: new InMemoryCache(), + link, + incrementalHandler: new Defer20220824Handler(), + }); + + const observable = client.watchQuery({ + query, + variables: {}, + // errorPolicy: 'none', // this is the default + }); + const stream = new ObservableStream(observable); + + let count = 0; + observable.subscribe({ + next: (result) => { + // errors should never be passed since they are ignored + count++; + // loading + if (count === 1) { + expect(result.error).toBeUndefined(); + } + // first result + if (count === 2) { + expect(result.error).toBeUndefined(); + } + // error + if (count === 3) { + expect(result.error).toBeDefined(); + } + }, + }); + + await expect(stream).toEmitTypedValue({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + partial: true, + }); + + // fire off first result + link.simulateResult({ result: { data: initialData } }); + + await expect(stream).toEmitTypedValue({ + data: initialData, + dataState: "complete", + loading: false, + networkStatus: 7, + partial: false, + }); + + link.simulateResult({ error: new Error("defer failed") }); + + await expect(stream).toEmitTypedValue({ + data: initialData, + dataState: "complete", + error: new Error("defer failed"), + loading: false, + networkStatus: NetworkStatus.error, + partial: false, + }); + + await expect(stream).not.toEmitAnything(); + }); +}); From 5e852487cdb34c02769915bc8d9ef6cd31ad5648 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Fri, 5 Sep 2025 18:30:41 -0600 Subject: [PATCH 082/254] Remove multiple-results tests from defer tests --- .../client.watchQuery/defer20220824.test.ts | 393 ------------------ 1 file changed, 393 deletions(-) diff --git a/src/core/__tests__/client.watchQuery/defer20220824.test.ts b/src/core/__tests__/client.watchQuery/defer20220824.test.ts index c142553980f..36c6ba5b8bb 100644 --- a/src/core/__tests__/client.watchQuery/defer20220824.test.ts +++ b/src/core/__tests__/client.watchQuery/defer20220824.test.ts @@ -1,4 +1,3 @@ -import { GraphQLError } from "graphql"; import { gql } from "graphql-tag"; import type { ObservableQuery } from "@apollo/client"; @@ -6,403 +5,11 @@ import { ApolloClient, NetworkStatus } from "@apollo/client"; import { InMemoryCache } from "@apollo/client/cache"; import { Defer20220824Handler } from "@apollo/client/incremental"; import { ApolloLink } from "@apollo/client/link"; -import { MockSubscriptionLink } from "@apollo/client/testing"; import { mockDefer20220824, ObservableStream, - wait, } from "@apollo/client/testing/internal"; -test("allows multiple query results from link", async () => { - const query = gql` - query LazyLoadLuke { - people_one(id: 1) { - name - friends @defer { - name - } - } - } - `; - - const initialData = { - people_one: { - name: "Luke Skywalker", - 
friends: null, - }, - }; - - const laterData = { - people_one: { - // XXX true defer's wouldn't send this - name: "Luke Skywalker", - friends: [{ name: "Leia Skywalker" }], - }, - }; - const link = new MockSubscriptionLink(); - const client = new ApolloClient({ - cache: new InMemoryCache(), - link, - incrementalHandler: new Defer20220824Handler(), - }); - - const observable = client.watchQuery({ - query, - variables: {}, - }); - const stream = new ObservableStream(observable); - - await expect(stream).toEmitTypedValue({ - data: undefined, - dataState: "empty", - loading: true, - networkStatus: NetworkStatus.loading, - partial: true, - }); - - // fire off first result - link.simulateResult({ result: { data: initialData } }); - - await expect(stream).toEmitTypedValue({ - data: initialData, - dataState: "complete", - loading: false, - networkStatus: 7, - partial: false, - }); - - link.simulateResult({ result: { data: laterData } }); - - await expect(stream).toEmitTypedValue({ - data: laterData, - dataState: "complete", - loading: false, - networkStatus: 7, - partial: false, - }); -}); - -test("allows multiple query results from link with ignored errors", async () => { - const query = gql` - query LazyLoadLuke { - people_one(id: 1) { - name - friends @defer { - name - } - } - } - `; - - const initialData = { - people_one: { - name: "Luke Skywalker", - friends: null, - }, - }; - - const laterData = { - people_one: { - // XXX true defer's wouldn't send this - name: "Luke Skywalker", - friends: [{ name: "Leia Skywalker" }], - }, - }; - const link = new MockSubscriptionLink(); - const client = new ApolloClient({ - cache: new InMemoryCache(), - link, - incrementalHandler: new Defer20220824Handler(), - }); - - const observable = client.watchQuery({ - query, - variables: {}, - errorPolicy: "ignore", - }); - const stream = new ObservableStream(observable); - - await expect(stream).toEmitTypedValue({ - data: undefined, - dataState: "empty", - loading: true, - networkStatus: NetworkStatus.loading, - partial: true, - }); - - // fire off first result - link.simulateResult({ result: { data: initialData } }); - - await expect(stream).toEmitTypedValue({ - data: initialData, - dataState: "complete", - loading: false, - networkStatus: 7, - partial: false, - }); - - link.simulateResult({ - result: { errors: [new GraphQLError("defer failed")] }, - }); - - await expect(stream).toEmitTypedValue({ - data: undefined, - dataState: "empty", - loading: false, - networkStatus: 7, - partial: true, - }); - - await wait(20); - link.simulateResult({ result: { data: laterData } }); - - await expect(stream).toEmitTypedValue({ - data: laterData, - dataState: "complete", - loading: false, - networkStatus: 7, - partial: false, - }); -}); - -test("strips errors from a result if ignored", async () => { - const query = gql` - query LazyLoadLuke { - people_one(id: 1) { - name - friends @defer { - name - } - } - } - `; - - const initialData = { - people_one: { - name: "Luke Skywalker", - friends: null, - }, - }; - - const laterData = { - people_one: { - // XXX true defer's wouldn't send this - name: "Luke Skywalker", - friends: [{ name: "Leia Skywalker" }], - }, - }; - const link = new MockSubscriptionLink(); - const client = new ApolloClient({ - cache: new InMemoryCache(), - link, - incrementalHandler: new Defer20220824Handler(), - }); - - const observable = client.watchQuery({ - query, - variables: {}, - errorPolicy: "ignore", - }); - const stream = new ObservableStream(observable); - - await expect(stream).toEmitTypedValue({ - 
data: undefined, - dataState: "empty", - loading: true, - networkStatus: NetworkStatus.loading, - partial: true, - }); - - // fire off first result - link.simulateResult({ result: { data: initialData } }); - - await expect(stream).toEmitTypedValue({ - data: initialData, - dataState: "complete", - loading: false, - networkStatus: 7, - partial: false, - }); - - // this should fire the `next` event without this error - link.simulateResult({ - result: { - errors: [new GraphQLError("defer failed")], - data: laterData, - }, - }); - - await expect(stream).toEmitTypedValue({ - data: laterData, - dataState: "complete", - loading: false, - networkStatus: 7, - partial: false, - }); -}); - -test.skip("allows multiple query results from link with all errors", async () => { - const query = gql` - query LazyLoadLuke { - people_one(id: 1) { - name - friends @defer { - name - } - } - } - `; - - const initialData = { - people_one: { - name: "Luke Skywalker", - friends: null, - }, - }; - - const laterData = { - people_one: { - // XXX true defer's wouldn't send this - name: "Luke Skywalker", - friends: [{ name: "Leia Skywalker" }], - }, - }; - const link = new MockSubscriptionLink(); - const client = new ApolloClient({ - cache: new InMemoryCache(), - link, - incrementalHandler: new Defer20220824Handler(), - }); - - const observable = client.watchQuery({ - query, - variables: {}, - errorPolicy: "all", - }); - const stream = new ObservableStream(observable); - - // fire off first result - link.simulateResult({ result: { data: initialData } }); - - await expect(stream).toEmitTypedValue({ - data: initialData, - dataState: "complete", - loading: false, - networkStatus: 7, - partial: false, - }); - - // this should fire the next event again - link.simulateResult({ - error: new Error("defer failed"), - }); - - await expect(stream).toEmitTypedValue({ - data: initialData, - dataState: "complete", - loading: false, - networkStatus: 7, - error: new Error("defer failed"), - partial: false, - }); - - link.simulateResult({ result: { data: laterData } }); - - await expect(stream).toEmitTypedValue({ - data: laterData, - dataState: "complete", - loading: false, - networkStatus: 7, - partial: false, - }); -}); - -test("emits error if an error is set with the none policy", async () => { - const query = gql` - query LazyLoadLuke { - people_one(id: 1) { - name - friends @defer { - name - } - } - } - `; - - const initialData = { - people_one: { - name: "Luke Skywalker", - friends: null, - }, - }; - - const link = new MockSubscriptionLink(); - const client = new ApolloClient({ - cache: new InMemoryCache(), - link, - incrementalHandler: new Defer20220824Handler(), - }); - - const observable = client.watchQuery({ - query, - variables: {}, - // errorPolicy: 'none', // this is the default - }); - const stream = new ObservableStream(observable); - - let count = 0; - observable.subscribe({ - next: (result) => { - // errors should never be passed since they are ignored - count++; - // loading - if (count === 1) { - expect(result.error).toBeUndefined(); - } - // first result - if (count === 2) { - expect(result.error).toBeUndefined(); - } - // error - if (count === 3) { - expect(result.error).toBeDefined(); - } - }, - }); - - await expect(stream).toEmitTypedValue({ - data: undefined, - dataState: "empty", - loading: true, - networkStatus: NetworkStatus.loading, - partial: true, - }); - - // fire off first result - link.simulateResult({ result: { data: initialData } }); - - await expect(stream).toEmitTypedValue({ - data: initialData, - 
dataState: "complete", - loading: false, - networkStatus: 7, - partial: false, - }); - - link.simulateResult({ error: new Error("defer failed") }); - - await expect(stream).toEmitTypedValue({ - data: initialData, - dataState: "complete", - error: new Error("defer failed"), - loading: false, - networkStatus: NetworkStatus.error, - partial: false, - }); - - await expect(stream).not.toEmitAnything(); -}); - test("deduplicates queries as long as a query still has deferred chunks", async () => { const query = gql` query LazyLoadLuke { From bb6dd7161da569b1818d04d32ec8e5b5ac0074eb Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Fri, 5 Sep 2025 18:31:00 -0600 Subject: [PATCH 083/254] Add new spec format tests --- .../deferGraphQL17Alpha9.test.ts | 175 ++++++++++++++++++ 1 file changed, 175 insertions(+) create mode 100644 src/core/__tests__/client.watchQuery/deferGraphQL17Alpha9.test.ts diff --git a/src/core/__tests__/client.watchQuery/deferGraphQL17Alpha9.test.ts b/src/core/__tests__/client.watchQuery/deferGraphQL17Alpha9.test.ts new file mode 100644 index 00000000000..035ce0525df --- /dev/null +++ b/src/core/__tests__/client.watchQuery/deferGraphQL17Alpha9.test.ts @@ -0,0 +1,175 @@ +import { gql } from "graphql-tag"; + +import type { ObservableQuery } from "@apollo/client"; +import { ApolloClient, NetworkStatus } from "@apollo/client"; +import { InMemoryCache } from "@apollo/client/cache"; +import { GraphQL17Alpha9Handler } from "@apollo/client/incremental"; +import { ApolloLink } from "@apollo/client/link"; +import { + mockDeferStreamGraphQL17Alpha9, + ObservableStream, +} from "@apollo/client/testing/internal"; + +test("deduplicates queries as long as a query still has deferred chunks", async () => { + const query = gql` + query LazyLoadLuke { + people(id: 1) { + id + name + friends { + id + ... 
@defer { + name + } + } + } + } + `; + + const outgoingRequestSpy = jest.fn(((operation, forward) => + forward(operation)) satisfies ApolloLink.RequestHandler); + const defer = mockDeferStreamGraphQL17Alpha9(); + const client = new ApolloClient({ + cache: new InMemoryCache({}), + link: new ApolloLink(outgoingRequestSpy).concat(defer.httpLink), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + const query1 = new ObservableStream( + client.watchQuery({ query, fetchPolicy: "network-only" }) + ); + const query2 = new ObservableStream( + client.watchQuery({ query, fetchPolicy: "network-only" }) + ); + expect(outgoingRequestSpy).toHaveBeenCalledTimes(1); + + const initialData = { + people: { + __typename: "Person", + id: 1, + name: "Luke", + friends: [ + { + __typename: "Person", + id: 5, + } as { __typename: "Person"; id: number; name?: string }, + { + __typename: "Person", + id: 8, + } as { __typename: "Person"; id: number; name?: string }, + ], + }, + }; + const initialResult: ObservableQuery.Result = { + data: initialData, + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + partial: true, + }; + + defer.enqueueInitialChunk({ + data: initialData, + pending: [ + { id: "0", path: ["people", "friends", 0] }, + { id: "1", path: ["people", "friends", 1] }, + ], + hasNext: true, + }); + + await expect(query1).toEmitTypedValue({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + partial: true, + }); + await expect(query2).toEmitTypedValue({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + partial: true, + }); + + await expect(query1).toEmitTypedValue(initialResult); + await expect(query2).toEmitTypedValue(initialResult); + + const query3 = new ObservableStream( + client.watchQuery({ query, fetchPolicy: "network-only" }) + ); + await expect(query3).toEmitTypedValue(initialResult); + expect(outgoingRequestSpy).toHaveBeenCalledTimes(1); + + const firstChunk: GraphQL17Alpha9Handler.SubsequentResult< + Record + > = { + incremental: [ + { + data: { + name: "Leia", + }, + id: "0", + }, + ], + completed: [{ id: "0" }], + hasNext: true, + }; + const resultAfterFirstChunk = structuredClone( + initialResult + ) as ObservableQuery.Result; + resultAfterFirstChunk.data.people.friends[0].name = "Leia"; + + defer.enqueueSubsequentChunk(firstChunk); + + await expect(query1).toEmitTypedValue(resultAfterFirstChunk); + await expect(query2).toEmitTypedValue(resultAfterFirstChunk); + await expect(query3).toEmitTypedValue(resultAfterFirstChunk); + + const query4 = new ObservableStream( + client.watchQuery({ query, fetchPolicy: "network-only" }) + ); + await expect(query4).toEmitTypedValue(resultAfterFirstChunk); + expect(outgoingRequestSpy).toHaveBeenCalledTimes(1); + + const secondChunk: GraphQL17Alpha9Handler.SubsequentResult< + Record + > = { + incremental: [ + { + data: { + name: "Han Solo", + }, + id: "1", + }, + ], + completed: [{ id: "1" }], + hasNext: false, + }; + const resultAfterSecondChunk = { + ...structuredClone(resultAfterFirstChunk), + loading: false, + networkStatus: NetworkStatus.ready, + dataState: "complete", + partial: false, + } as ObservableQuery.Result; + resultAfterSecondChunk.data.people.friends[1].name = "Han Solo"; + + defer.enqueueSubsequentChunk(secondChunk); + + await expect(query1).toEmitTypedValue(resultAfterSecondChunk); + await expect(query2).toEmitTypedValue(resultAfterSecondChunk); + await 
expect(query3).toEmitTypedValue(resultAfterSecondChunk); + await expect(query4).toEmitTypedValue(resultAfterSecondChunk); + + // TODO: Re-enable once below condition can be met + /* const query5 = */ new ObservableStream( + client.watchQuery({ query, fetchPolicy: "network-only" }) + ); + // TODO: Re-enable once notifyOnNetworkStatusChange controls whether we + // get the loading state. This test fails with the switch to RxJS for now + // since the initial value is emitted synchronously unlike zen-observable + // where the emitted result wasn't emitted until after this assertion. + // expect(query5).not.toEmitAnything(); + expect(outgoingRequestSpy).toHaveBeenCalledTimes(2); +}); From a9c5e7dcc2a7dd9d7287338edd2bad08be740433 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Fri, 5 Sep 2025 18:32:02 -0600 Subject: [PATCH 084/254] Remove defer in tests that don't test defer --- .../ApolloClient/multiple-results.test.ts | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/src/core/__tests__/ApolloClient/multiple-results.test.ts b/src/core/__tests__/ApolloClient/multiple-results.test.ts index 466e02c920e..1706bb859d7 100644 --- a/src/core/__tests__/ApolloClient/multiple-results.test.ts +++ b/src/core/__tests__/ApolloClient/multiple-results.test.ts @@ -13,7 +13,7 @@ describe("mutiple results", () => { query LazyLoadLuke { people_one(id: 1) { name - friends @defer { + friends { name } } @@ -29,7 +29,6 @@ describe("mutiple results", () => { const laterData = { people_one: { - // XXX true defer's wouldn't send this name: "Luke Skywalker", friends: [{ name: "Leia Skywalker" }], }, @@ -82,7 +81,7 @@ describe("mutiple results", () => { query LazyLoadLuke { people_one(id: 1) { name - friends @defer { + friends { name } } @@ -98,7 +97,6 @@ describe("mutiple results", () => { const laterData = { people_one: { - // XXX true defer's wouldn't send this name: "Luke Skywalker", friends: [{ name: "Leia Skywalker" }], }, @@ -165,7 +163,7 @@ describe("mutiple results", () => { query LazyLoadLuke { people_one(id: 1) { name - friends @defer { + friends { name } } @@ -181,7 +179,6 @@ describe("mutiple results", () => { const laterData = { people_one: { - // XXX true defer's wouldn't send this name: "Luke Skywalker", friends: [{ name: "Leia Skywalker" }], }, @@ -241,7 +238,7 @@ describe("mutiple results", () => { query LazyLoadLuke { people_one(id: 1) { name - friends @defer { + friends { name } } @@ -257,7 +254,6 @@ describe("mutiple results", () => { const laterData = { people_one: { - // XXX true defer's wouldn't send this name: "Luke Skywalker", friends: [{ name: "Leia Skywalker" }], }, @@ -317,7 +313,7 @@ describe("mutiple results", () => { query LazyLoadLuke { people_one(id: 1) { name - friends @defer { + friends { name } } From 8de454669bcdde26bf201837555d5ef60b76a79d Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Mon, 8 Sep 2025 10:19:43 -0600 Subject: [PATCH 085/254] Exclude useBackgroundQuery/useLoadableQuery subfolder tests from React 17 --- config/jest.config.ts | 2 ++ 1 file changed, 2 insertions(+) diff --git a/config/jest.config.ts b/config/jest.config.ts index bf0997ccec5..4cd48b1fd7c 100644 --- a/config/jest.config.ts +++ b/config/jest.config.ts @@ -51,7 +51,9 @@ const react17TestFileIgnoreList = [ "src/react/hooks/__tests__/useSuspenseQuery.test.tsx", "src/react/hooks/__tests__/useSuspenseQuery/*", "src/react/hooks/__tests__/useBackgroundQuery.test.tsx", + "src/react/hooks/__tests__/useBackgroundQuery/*", "src/react/hooks/__tests__/useLoadableQuery.test.tsx", + 
"src/react/hooks/__tests__/useLoadableQuery/*", "src/react/hooks/__tests__/useQueryRefHandlers.test.tsx", "src/react/query-preloader/__tests__/createQueryPreloader.test.tsx", "src/react/ssr/__tests__/prerenderStatic.test.tsx", From 7512b6b720696a96d3254b90048a443e27c43175 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Mon, 8 Sep 2025 14:47:24 -0600 Subject: [PATCH 086/254] Update test with new test utils --- .../__tests__/graphql17Alpha9/defer.test.ts | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/src/incremental/handlers/__tests__/graphql17Alpha9/defer.test.ts b/src/incremental/handlers/__tests__/graphql17Alpha9/defer.test.ts index 4462476513e..b797d36a5c1 100644 --- a/src/incremental/handlers/__tests__/graphql17Alpha9/defer.test.ts +++ b/src/incremental/handlers/__tests__/graphql17Alpha9/defer.test.ts @@ -28,7 +28,7 @@ import { import { GraphQL17Alpha9Handler } from "@apollo/client/incremental"; import { markAsStreaming, - mockDefer20220824, + mockDeferStreamGraphQL17Alpha9, ObservableStream, wait, } from "@apollo/client/testing/internal"; @@ -2379,9 +2379,8 @@ test("GraphQL17Alpha9Handler can be used with `ApolloClient`", async () => { }); }); -// TODO: Add test helpers for new format -test.failing("merges cache updates that happen concurrently", async () => { - const stream = mockDefer20220824(); +test("merges cache updates that happen concurrently", async () => { + const stream = mockDeferStreamGraphQL17Alpha9(); const client = new ApolloClient({ link: stream.httpLink, cache: new InMemoryCache(), @@ -2418,6 +2417,7 @@ test.failing("merges cache updates that happen concurrently", async () => { job: "Farmer", }, }, + pending: [{ id: "0", path: ["hero"] }], hasNext: true, }); @@ -2453,9 +2453,10 @@ test.failing("merges cache updates that happen concurrently", async () => { data: { name: "Luke", }, - path: ["hero"], + id: "0", }, ], + completed: [{ id: "0" }], hasNext: false, }); @@ -2650,9 +2651,8 @@ test("stream that returns an error but continues to stream", async () => { }); }); -// TODO: Update to use test utils with updated types -test.skip("handles final chunk of { hasNext: false } correctly in usage with Apollo Client", async () => { - const stream = mockDefer20220824(); +test("handles final chunk of { hasNext: false } correctly in usage with Apollo Client", async () => { + const stream = mockDeferStreamGraphQL17Alpha9(); const client = new ApolloClient({ link: stream.httpLink, cache: new InMemoryCache(), @@ -2675,6 +2675,7 @@ test.skip("handles final chunk of { hasNext: false } correctly in usage with Apo data: { allProducts: [null, null, null], }, + pending: [], errors: [ { message: From 96120c56b80b90c82bd49a8596310391286761a8 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Mon, 8 Sep 2025 15:37:13 -0600 Subject: [PATCH 087/254] Update error message --- src/incremental/handlers/graphql17Alpha9.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/incremental/handlers/graphql17Alpha9.ts b/src/incremental/handlers/graphql17Alpha9.ts index 71bce701865..653914918fc 100644 --- a/src/incremental/handlers/graphql17Alpha9.ts +++ b/src/incremental/handlers/graphql17Alpha9.ts @@ -112,7 +112,7 @@ class IncrementalRequest const pending = this.pending.find(({ id }) => incremental.id === id); invariant( pending, - "Could not find pending chunk for incremental value. Please file an issue because this is a bug in Apollo Client." + "Could not find pending chunk for incremental value. 
Please file an issue for the Apollo Client team to investigate." ); let { data } = incremental; From 0772538413c265184d0d85d32a83f5e7850059b7 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Mon, 8 Sep 2025 15:39:06 -0600 Subject: [PATCH 088/254] Use filter instead of indexOf and splice --- src/incremental/handlers/graphql17Alpha9.ts | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/incremental/handlers/graphql17Alpha9.ts b/src/incremental/handlers/graphql17Alpha9.ts index 653914918fc..a014c1b2897 100644 --- a/src/incremental/handlers/graphql17Alpha9.ts +++ b/src/incremental/handlers/graphql17Alpha9.ts @@ -136,8 +136,7 @@ class IncrementalRequest if ("completed" in chunk && chunk.completed) { for (const completed of chunk.completed) { - const index = this.pending.findIndex(({ id }) => id === completed.id); - this.pending.splice(index, 1); + this.pending = this.pending.filter(({ id }) => id !== completed.id); if (completed.errors) { this.errors.push(...completed.errors); From b72deb0a3119bc7448e6af09a01a75e58fd9d6de Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Mon, 8 Sep 2025 15:46:31 -0600 Subject: [PATCH 089/254] Add tests for the new format for useMutation --- .../useMutation/deferGraphQL17Alpha9.test.tsx | 388 ++++++++++++++++++ 1 file changed, 388 insertions(+) create mode 100644 src/react/hooks/__tests__/useMutation/deferGraphQL17Alpha9.test.tsx diff --git a/src/react/hooks/__tests__/useMutation/deferGraphQL17Alpha9.test.tsx b/src/react/hooks/__tests__/useMutation/deferGraphQL17Alpha9.test.tsx new file mode 100644 index 00000000000..6cf690be0ef --- /dev/null +++ b/src/react/hooks/__tests__/useMutation/deferGraphQL17Alpha9.test.tsx @@ -0,0 +1,388 @@ +import { + disableActEnvironment, + renderHookToSnapshotStream, +} from "@testing-library/react-render-stream"; +import { gql } from "graphql-tag"; + +import { ApolloClient, CombinedGraphQLErrors } from "@apollo/client"; +import { InMemoryCache } from "@apollo/client/cache"; +import { GraphQL17Alpha9Handler } from "@apollo/client/incremental"; +import { useMutation } from "@apollo/client/react"; +import { + createClientWrapper, + mockDeferStreamGraphQL17Alpha9, + spyOnConsole, +} from "@apollo/client/testing/internal"; + +const CREATE_TODO_ERROR = "Failed to create item"; + +test("resolves a deferred mutation with the full result", async () => { + using _ = spyOnConsole("error"); + const mutation = gql` + mutation createTodo($description: String!, $priority: String) { + createTodo(description: $description, priority: $priority) { + id + ... 
@defer { + description + priority + } + } + } + `; + const variables = { + description: "Get milk!", + }; + + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDeferStreamGraphQL17Alpha9(); + + const client = new ApolloClient({ + link: httpLink, + cache: new InMemoryCache(), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot, getCurrentSnapshot } = await renderHookToSnapshotStream( + () => useMutation(mutation), + { wrapper: createClientWrapper(client) } + ); + + { + const [, mutation] = await takeSnapshot(); + + expect(mutation).toStrictEqualTyped({ + data: undefined, + error: undefined, + loading: false, + called: false, + }); + } + + const [mutate] = getCurrentSnapshot(); + + const promise = mutate({ variables }); + + { + const [, mutation] = await takeSnapshot(); + + expect(mutation).toStrictEqualTyped({ + data: undefined, + error: undefined, + loading: true, + called: true, + }); + } + + enqueueInitialChunk({ + data: { + createTodo: { + id: 1, + __typename: "Todo", + }, + }, + pending: [{ id: "0", path: ["createTodo"] }], + hasNext: true, + }); + + await expect(takeSnapshot).not.toRerender(); + + enqueueSubsequentChunk({ + incremental: [ + { + data: { + description: "Get milk!", + priority: "High", + __typename: "Todo", + }, + id: "0", + }, + ], + completed: [{ id: "0" }], + hasNext: false, + }); + + { + const [, mutation] = await takeSnapshot(); + + expect(mutation).toStrictEqualTyped({ + data: { + createTodo: { + id: 1, + description: "Get milk!", + priority: "High", + __typename: "Todo", + }, + }, + error: undefined, + loading: false, + called: true, + }); + } + + await expect(promise).resolves.toStrictEqualTyped({ + data: { + createTodo: { + id: 1, + description: "Get milk!", + priority: "High", + __typename: "Todo", + }, + }, + }); + + expect(console.error).not.toHaveBeenCalled(); +}); + +test("resolves with resulting errors and calls onError callback", async () => { + using _ = spyOnConsole("error"); + const mutation = gql` + mutation createTodo($description: String!, $priority: String) { + createTodo(description: $description, priority: $priority) { + id + ... 
@defer { + description + priority + } + } + } + `; + const variables = { + description: "Get milk!", + }; + + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDeferStreamGraphQL17Alpha9(); + + const client = new ApolloClient({ + link: httpLink, + cache: new InMemoryCache(), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + const onError = jest.fn(); + using _disabledAct = disableActEnvironment(); + const { takeSnapshot, getCurrentSnapshot } = await renderHookToSnapshotStream( + () => useMutation(mutation, { onError }), + { + wrapper: createClientWrapper(client), + } + ); + + { + const [, result] = await takeSnapshot(); + + expect(result).toStrictEqualTyped({ + data: undefined, + error: undefined, + loading: false, + called: false, + }); + } + + const [createTodo] = getCurrentSnapshot(); + + const promise = createTodo({ variables }); + + { + const [, result] = await takeSnapshot(); + + expect(result).toStrictEqualTyped({ + data: undefined, + error: undefined, + loading: true, + called: true, + }); + } + + enqueueInitialChunk({ + data: { + createTodo: { + id: 1, + __typename: "Todo", + }, + }, + pending: [{ id: "0", path: ["createTodo"] }], + hasNext: true, + }); + + await expect(takeSnapshot).not.toRerender(); + + enqueueSubsequentChunk({ + completed: [{ id: "0", errors: [{ message: CREATE_TODO_ERROR }] }], + hasNext: false, + }); + + await expect(promise).rejects.toThrow( + new CombinedGraphQLErrors({ errors: [{ message: CREATE_TODO_ERROR }] }) + ); + + { + const [, result] = await takeSnapshot(); + + expect(result).toStrictEqualTyped({ + data: undefined, + error: new CombinedGraphQLErrors({ + data: { createTodo: { __typename: "Todo", id: 1 } }, + errors: [{ message: CREATE_TODO_ERROR }], + }), + loading: false, + called: true, + }); + } + + await expect(takeSnapshot).not.toRerender(); + + expect(onError).toHaveBeenCalledTimes(1); + expect(onError).toHaveBeenLastCalledWith( + new CombinedGraphQLErrors({ + data: { createTodo: { __typename: "Todo", id: 1 } }, + errors: [{ message: CREATE_TODO_ERROR }], + }), + expect.anything() + ); + expect(console.error).not.toHaveBeenCalled(); +}); + +test("calls the update function with the final merged result data", async () => { + using _ = spyOnConsole("error"); + const mutation = gql` + mutation createTodo($description: String!, $priority: String) { + createTodo(description: $description, priority: $priority) { + id + ... 
@defer { + description + priority + } + } + } + `; + const variables = { + description: "Get milk!", + }; + + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDeferStreamGraphQL17Alpha9(); + const update = jest.fn(); + const client = new ApolloClient({ + link: httpLink, + cache: new InMemoryCache(), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot, getCurrentSnapshot } = await renderHookToSnapshotStream( + () => useMutation(mutation, { update }), + { + wrapper: createClientWrapper(client), + } + ); + + { + const [, result] = await takeSnapshot(); + + expect(result).toStrictEqualTyped({ + data: undefined, + error: undefined, + loading: false, + called: false, + }); + } + + const [createTodo] = getCurrentSnapshot(); + + const promiseReturnedByMutate = createTodo({ variables }); + + { + const [, result] = await takeSnapshot(); + + expect(result).toStrictEqualTyped({ + data: undefined, + error: undefined, + loading: true, + called: true, + }); + } + + enqueueInitialChunk({ + data: { + createTodo: { + id: 1, + __typename: "Todo", + }, + }, + pending: [{ id: "0", path: ["createTodo"] }], + hasNext: true, + }); + + await expect(takeSnapshot).not.toRerender(); + + enqueueSubsequentChunk({ + incremental: [ + { + data: { + description: "Get milk!", + priority: "High", + __typename: "Todo", + }, + id: "0", + }, + ], + completed: [{ id: "0" }], + hasNext: false, + }); + + await expect(promiseReturnedByMutate).resolves.toStrictEqualTyped({ + data: { + createTodo: { + id: 1, + description: "Get milk!", + priority: "High", + __typename: "Todo", + }, + }, + }); + + { + const [, result] = await takeSnapshot(); + + expect(result).toStrictEqualTyped({ + data: { + createTodo: { + id: 1, + description: "Get milk!", + priority: "High", + __typename: "Todo", + }, + }, + error: undefined, + loading: false, + called: true, + }); + } + + await expect(takeSnapshot).not.toRerender(); + + expect(update).toHaveBeenCalledTimes(1); + expect(update).toHaveBeenCalledWith( + // the first item is the cache, which we don't need to make any + // assertions against in this test + expect.anything(), + // second argument is the result + expect.objectContaining({ + data: { + createTodo: { + id: 1, + description: "Get milk!", + priority: "High", + __typename: "Todo", + }, + }, + }), + // third argument is an object containing context and variables + // but we only care about variables here + expect.objectContaining({ variables }) + ); + + expect(console.error).not.toHaveBeenCalled(); +}); From b53ce0d50838a84aa53e855f9d2c14da26a4ed12 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 9 Sep 2025 13:21:29 -0600 Subject: [PATCH 090/254] Remove export/check for hasIncrementalChunks --- .../__tests__/graphql17Alpha9/defer.test.ts | 69 ------------------- src/incremental/handlers/graphql17Alpha9.ts | 3 +- 2 files changed, 1 insertion(+), 71 deletions(-) diff --git a/src/incremental/handlers/__tests__/graphql17Alpha9/defer.test.ts b/src/incremental/handlers/__tests__/graphql17Alpha9/defer.test.ts index b797d36a5c1..b61e7d2d4e7 100644 --- a/src/incremental/handlers/__tests__/graphql17Alpha9/defer.test.ts +++ b/src/incremental/handlers/__tests__/graphql17Alpha9/defer.test.ts @@ -33,11 +33,6 @@ import { wait, } from "@apollo/client/testing/internal"; -import { - hasIncrementalChunks, - // eslint-disable-next-line local-rules/no-relative-imports -} from "../../graphql17Alpha9.js"; - // This is the test setup of the `graphql-js` 
v17.0.0-alpha.9 release: // https://github.com/graphql/graphql-js/blob/3283f8adf52e77a47f148ff2f30185c8d11ff0f0/src/execution/__tests__/defer-test.ts @@ -247,7 +242,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { hero: { @@ -263,7 +257,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { hero: { @@ -295,7 +288,6 @@ describe("graphql-js test cases", () => { assert(chunk); expect(handler.isIncrementalResult(chunk)).toBe(false); - expect(hasIncrementalChunks(chunk)).toBe(false); }); it.skip("Does not disable defer with null if argument", async () => { @@ -332,7 +324,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: {}, }); @@ -344,7 +335,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { hero: { @@ -384,7 +374,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: {}, }); @@ -396,7 +385,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { hero: { @@ -442,7 +430,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { hero: {}, @@ -456,7 +443,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { hero: { @@ -498,7 +484,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { hero: { @@ -514,7 +499,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { hero: { @@ -554,7 +538,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { hero: {}, @@ -568,7 +551,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { hero: { @@ -604,7 +586,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - 
expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { hero: {}, @@ -618,7 +599,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { hero: { @@ -657,7 +637,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: {}, }); @@ -669,7 +648,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { hero: { @@ -712,7 +690,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { hero: {}, @@ -726,7 +703,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { hero: { @@ -762,7 +738,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: {}, }); @@ -774,7 +749,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { hero: { @@ -840,7 +814,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: {}, }); @@ -852,7 +825,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { a: { @@ -872,7 +844,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { a: { @@ -943,7 +914,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: {}, }); @@ -955,7 +925,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { a: {}, @@ -971,7 +940,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { a: { @@ -1023,7 +991,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); 
expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { hero: { @@ -1039,7 +1006,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { hero: { @@ -1099,7 +1065,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { hero: { @@ -1124,7 +1089,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { hero: { @@ -1176,7 +1140,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { hero: {}, @@ -1190,7 +1153,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { hero: { @@ -1257,7 +1219,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { hero: { @@ -1277,7 +1238,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { hero: { @@ -1333,7 +1293,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { hero: { @@ -1351,7 +1310,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { hero: { @@ -1416,7 +1374,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { a: { @@ -1434,7 +1391,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { a: { @@ -1481,7 +1437,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: {}, }); @@ -1493,7 +1448,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { hero: { @@ -1543,7 +1497,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, 
chunk)).toStrictEqualTyped({ data: { a: {}, @@ -1557,7 +1510,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { a: { @@ -1617,7 +1569,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { a: {}, @@ -1631,7 +1582,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { a: { @@ -1689,7 +1639,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: {}, }); @@ -1701,7 +1650,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: {}, errors: [ @@ -1764,7 +1712,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: {}, }); @@ -1776,7 +1723,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { a: { @@ -1849,7 +1795,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { a: {}, @@ -1863,7 +1808,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { a: { @@ -1881,7 +1825,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { a: { @@ -1937,7 +1880,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: {}, }); @@ -1949,7 +1891,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { hero: null, @@ -2003,7 +1944,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { hero: { @@ -2019,7 +1959,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { hero: { @@ 
-2077,7 +2016,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { hero: { @@ -2093,7 +2031,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { hero: { @@ -2139,7 +2076,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { hero: { @@ -2155,7 +2091,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { hero: { @@ -2205,7 +2140,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { hero: { @@ -2221,7 +2155,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { hero: { @@ -2276,7 +2209,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { hero: { @@ -2292,7 +2224,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { hero: { diff --git a/src/incremental/handlers/graphql17Alpha9.ts b/src/incremental/handlers/graphql17Alpha9.ts index a014c1b2897..d43230298dd 100644 --- a/src/incremental/handlers/graphql17Alpha9.ts +++ b/src/incremental/handlers/graphql17Alpha9.ts @@ -229,8 +229,7 @@ export class GraphQL17Alpha9Handler } } -// only exported for use in tests -export function hasIncrementalChunks( +function hasIncrementalChunks( result: Record ): result is Required { return isNonEmptyArray(result.incremental); From c7fba99e16da522fdbc35b9c16cdb8df0dda4c2c Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 9 Sep 2025 22:41:23 -0600 Subject: [PATCH 091/254] Add changeset --- .changeset/little-yaks-decide.md | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) create mode 100644 .changeset/little-yaks-decide.md diff --git a/.changeset/little-yaks-decide.md b/.changeset/little-yaks-decide.md new file mode 100644 index 00000000000..53aa1d9cd75 --- /dev/null +++ b/.changeset/little-yaks-decide.md @@ -0,0 +1,17 @@ +--- +"@apollo/client": minor +--- + +Support the newer incremental delivery format for the `@defer` directive implemented in `graphql@17.0.0-alpha.9`. Import the `GraphQL17Alpha9Handler` to use the newer incremental delivery format with `@defer`. + +```ts +import { GraphQL17Alpha9Handler } from "@apollo/client/incremental"; + +const client = new ApolloClient({ + // ... 
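+  // Illustrative sketch only, not part of the original changeset: a server
+  // that still speaks the older 2022-08-24 incremental delivery format
+  // (for example Apollo Router, per the note below) would instead be
+  // configured with that format's handler. The import path here is assumed
+  // to be the same "@apollo/client/incremental" entry point:
+  //
+  //   import { Defer20220824Handler } from "@apollo/client/incremental";
+  //   incrementalHandler: new Defer20220824Handler(),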
+  incrementalHandler: new GraphQL17Alpha9Handler(),
+});
+```
+
+> [!NOTE]
+> In order to use the `GraphQL17Alpha9Handler`, the GraphQL server MUST implement the newer incremental delivery format. You may see errors or unusual behavior if you use the wrong handler. If you are using Apollo Router, continue to use the `Defer20220824Handler` because Apollo Router does not yet support the newer incremental delivery format.

From c4a4228eb4123c19b15bbb5b27de154a7175a093 Mon Sep 17 00:00:00 2001
From: Jerel Miller
Date: Tue, 9 Sep 2025 22:49:26 -0600
Subject: [PATCH 092/254] Update size limits

---
 .size-limits.json | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/.size-limits.json b/.size-limits.json
index c2bb067567d..feea5be9df0 100644
--- a/.size-limits.json
+++ b/.size-limits.json
@@ -1,6 +1,6 @@
 {
-  "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (CJS)": 44246,
-  "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production) (CJS)": 39057,
-  "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\"": 33470,
+  "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (CJS)": 44249,
+  "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production) (CJS)": 38998,
+  "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\"": 33462,
   "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production)": 27490
 }

From 8052f24efaa3e9d19524b48839000470922c279e Mon Sep 17 00:00:00 2001
From: Jerel Miller
Date: Tue, 9 Sep 2025 23:45:03 -0600
Subject: [PATCH 093/254] Fix name of test file

---
 ...eferGraphQL17Alpha2.test.tsx => deferGraphQL17Alpha9.test.tsx} | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 rename src/react/hooks/__tests__/useQuery/{deferGraphQL17Alpha2.test.tsx => deferGraphQL17Alpha9.test.tsx} (100%)

diff --git a/src/react/hooks/__tests__/useQuery/deferGraphQL17Alpha2.test.tsx b/src/react/hooks/__tests__/useQuery/deferGraphQL17Alpha9.test.tsx
similarity index 100%
rename from src/react/hooks/__tests__/useQuery/deferGraphQL17Alpha2.test.tsx
rename to src/react/hooks/__tests__/useQuery/deferGraphQL17Alpha9.test.tsx

From e0890e606c7663e880fc070c332080e2ac2e68a3 Mon Sep 17 00:00:00 2001
From: Jerel Miller
Date: Tue, 9 Sep 2025 23:48:21 -0600
Subject: [PATCH 094/254] Initialize a new DeepMerger each time

---
 src/incremental/handlers/graphql17Alpha9.ts | 23 +++++++++++++--------
 1 file changed, 14 insertions(+), 9 deletions(-)

diff --git a/src/incremental/handlers/graphql17Alpha9.ts b/src/incremental/handlers/graphql17Alpha9.ts
index d43230298dd..6f700bb4345 100644
--- a/src/incremental/handlers/graphql17Alpha9.ts
+++ b/src/incremental/handlers/graphql17Alpha9.ts
@@ -86,7 +86,6 @@ class IncrementalRequest
   private errors: GraphQLFormattedError[] = [];
   private extensions: Record = {};
   private pending: GraphQL17Alpha9Handler.PendingResult[] = [];
-  private merger = new DeepMerger();

   handle(
     cacheData: TData | DeepPartial | null | undefined = this.data,
@@ -99,7 +98,7 @@ class IncrementalRequest
       this.pending.push(...chunk.pending);
     }

-    this.merge(chunk);
+    this.merge(chunk, new DeepMerger());

     if (hasIncrementalChunks(chunk)) {
       for (const incremental of chunk.incremental) {
@@ -126,11 +125,14 @@ class IncrementalRequest
         data = parent as typeof data;
       }

-      this.merge({
-        data: data as TData,
-        extensions: incremental.extensions,
-        errors: incremental.errors,
-      });
+      this.merge(
+        {
+          data: data as TData,
+          extensions: incremental.extensions,
+          errors: incremental.errors,
+        },
+        new DeepMerger()
+      );
     }
   }

@@ -157,9 +159,12 @@ class IncrementalRequest
     return result;
   }

-  private merge(normalized: FormattedExecutionResult) {
+  private merge(
+    normalized: FormattedExecutionResult,
+    merger: DeepMerger
+  ) {
     if (normalized.data !== undefined) {
-      this.data = this.merger.merge(this.data, normalized.data);
+      this.data = merger.merge(this.data, normalized.data);
     }

     if (normalized.errors) {

From d59dd307183e898f320dc6990f93f312316dac0b Mon Sep 17 00:00:00 2001
From: Jerel Miller
Date: Tue, 9 Sep 2025 23:49:10 -0600
Subject: [PATCH 095/254] Fix incorrect assertion for useQuery test due to bug in handler

---
 .../hooks/__tests__/useQuery/deferGraphQL17Alpha9.test.tsx | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/src/react/hooks/__tests__/useQuery/deferGraphQL17Alpha9.test.tsx b/src/react/hooks/__tests__/useQuery/deferGraphQL17Alpha9.test.tsx
index 0e967508933..d70c095de26 100644
--- a/src/react/hooks/__tests__/useQuery/deferGraphQL17Alpha9.test.tsx
+++ b/src/react/hooks/__tests__/useQuery/deferGraphQL17Alpha9.test.tsx
@@ -522,7 +522,12 @@ test("should handle deferred queries with fetch policy no-cache", async () => {
     dataState: "complete",
     loading: false,
     networkStatus: NetworkStatus.ready,
-    previousData: undefined,
+    previousData: {
+      greeting: {
+        message: "Hello world",
+        __typename: "Greeting",
+      },
+    },
     variables: {},
   });

From 27fc9dc8dd0a333d774c8223b777d112470dd8ac Mon Sep 17 00:00:00 2001
From: Jerel Miller
Date: Tue, 9 Sep 2025 23:55:28 -0600
Subject: [PATCH 096/254] Update size limits

---
 .size-limits.json | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.size-limits.json b/.size-limits.json
index feea5be9df0..722c44c586a 100644
--- a/.size-limits.json
+++ b/.size-limits.json
@@ -1,6 +1,6 @@
 {
-  "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (CJS)": 44249,
-  "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production) (CJS)": 38998,
+  "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (CJS)": 44188,
+  "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production) (CJS)": 39024,
   "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\"": 33462,
   "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production)": 27490
 }

From 000cef46602e63d4b4f3a826089c87cad1582083 Mon Sep 17 00:00:00 2001
From: Jerel Miller
Date: Tue, 16 Sep 2025 09:56:38 -0600
Subject: [PATCH 097/254] Fix duplicate errors in extractErrors from initial chunk

---
 src/incremental/handlers/graphql17Alpha9.ts | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/incremental/handlers/graphql17Alpha9.ts b/src/incremental/handlers/graphql17Alpha9.ts
index 6f700bb4345..7e4d59af29b 100644
--- a/src/incremental/handlers/graphql17Alpha9.ts
+++ b/src/incremental/handlers/graphql17Alpha9.ts
@@ -217,10 +217,10 @@ export class GraphQL17Alpha9Handler
       }
     };

-    push(result);
-
     if (this.isIncrementalResult(result)) {
       push(new IncrementalRequest().handle(undefined, result));
+    } else {
+      push(result);
     }

     if (acc.length) {

From 14e5a9862956f24873aee56c873707f1aa1cede6 Mon Sep 17 00:00:00 2001
From: Jerel Miller
Date: Tue, 16 Sep 2025 10:01:09 -0600
Subject: [PATCH 098/254] Update size limits

---
 .size-limits.json | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.size-limits.json b/.size-limits.json
index 722c44c586a..e48d56978cf 100644
---
a/.size-limits.json +++ b/.size-limits.json @@ -1,6 +1,6 @@ { - "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (CJS)": 44188, - "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production) (CJS)": 39024, + "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (CJS)": 44206, + "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production) (CJS)": 39060, "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\"": 33462, "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production)": 27490 } From 49873cb36816949385bbfa5d3cd54b22d28a6fcc Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 9 Sep 2025 11:28:42 -0600 Subject: [PATCH 099/254] Add stream tests --- .../__tests__/graphql17Alpha9/stream.test.ts | 2505 +++++++++++++++++ 1 file changed, 2505 insertions(+) create mode 100644 src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts diff --git a/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts b/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts new file mode 100644 index 00000000000..2dda1d23da3 --- /dev/null +++ b/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts @@ -0,0 +1,2505 @@ +import assert from "node:assert"; + +import type { + DocumentNode, + FormattedExecutionResult, + FormattedInitialIncrementalExecutionResult, + FormattedSubsequentIncrementalExecutionResult, +} from "graphql-17-alpha9"; +import { + experimentalExecuteIncrementally, + GraphQLID, + GraphQLList, + GraphQLNonNull, + GraphQLObjectType, + GraphQLSchema, + GraphQLString, +} from "graphql-17-alpha9"; + +import { gql } from "@apollo/client"; +import { GraphQL17Alpha9Handler } from "@apollo/client/incremental"; + +import { + hasIncrementalChunks, + // eslint-disable-next-line local-rules/no-relative-imports +} from "../../graphql17Alpha9.js"; + +// This is the test setup of the `graphql-js` v17.0.0-alpha.9 release: +// https://github.com/graphql/graphql-js/blob/3283f8adf52e77a47f148ff2f30185c8d11ff0f0/src/execution/__tests__/stream-test.ts + +const friendType = new GraphQLObjectType({ + fields: { + id: { type: GraphQLID }, + name: { type: GraphQLString }, + nonNullName: { type: new GraphQLNonNull(GraphQLString) }, + }, + name: "Friend", +}); + +const friends = [ + { name: "Luke", id: 1 }, + { name: "Han", id: 2 }, + { name: "Leia", id: 3 }, +]; + +const query = new GraphQLObjectType({ + fields: { + scalarList: { + type: new GraphQLList(GraphQLString), + }, + scalarListList: { + type: new GraphQLList(new GraphQLList(GraphQLString)), + }, + friendList: { + type: new GraphQLList(friendType), + }, + nonNullFriendList: { + type: new GraphQLList(new GraphQLNonNull(friendType)), + }, + nestedObject: { + type: new GraphQLObjectType({ + name: "NestedObject", + fields: { + scalarField: { + type: GraphQLString, + }, + nonNullScalarField: { + type: new GraphQLNonNull(GraphQLString), + }, + nestedFriendList: { type: new GraphQLList(friendType) }, + deeperNestedObject: { + type: new GraphQLObjectType({ + name: "DeeperNestedObject", + fields: { + nonNullScalarField: { + type: new GraphQLNonNull(GraphQLString), + }, + deeperNestedFriendList: { type: new GraphQLList(friendType) }, + }, + }), + }, + }, + }), + }, + }, + name: "Query", +}); + +const schema = new GraphQLSchema({ query }); + +function resolveOnNextTick(): Promise { + return Promise.resolve(undefined); +} + +type PromiseOrValue = Promise | T; + +function promiseWithResolvers(): { 
+ promise: Promise; + resolve: (value: T | PromiseOrValue) => void; + reject: (reason?: any) => void; +} { + // these are assigned synchronously within the Promise constructor + let resolve!: (value: T | PromiseOrValue) => void; + let reject!: (reason?: any) => void; + const promise = new Promise((res, rej) => { + resolve = res; + reject = rej; + }); + return { promise, resolve, reject }; +} + +async function* run( + document: DocumentNode, + rootValue: unknown = {}, + enableEarlyExecution = false +): AsyncGenerator< + | FormattedInitialIncrementalExecutionResult + | FormattedSubsequentIncrementalExecutionResult + | FormattedExecutionResult, + void +> { + const result = await experimentalExecuteIncrementally({ + schema, + document, + rootValue, + enableEarlyExecution, + }); + + if ("initialResult" in result) { + yield JSON.parse(JSON.stringify(result.initialResult)); + + for await (const patch of result.subsequentResults) { + yield JSON.parse(JSON.stringify(patch)); + } + } else { + yield JSON.parse(JSON.stringify(result)); + } +} + +describe("graphql-js test cases", () => { + // These test cases mirror stream tests of the `graphql-js` v17.0.0-alpha.9 release: + // https://github.com/graphql/graphql-js/blob/3283f8adf52e77a47f148ff2f30185c8d11ff0f0/src/execution/__tests__/stream-test.ts + + it.skip("Can stream a list field", async () => { + const query = gql` + query { + scalarList @stream(initialCount: 1) + } + `; + + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + scalarList: () => ["apple", "banana", "coconut"], + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + scalarList: ["apple"], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + scalarList: ["apple", "banana", "coconut"], + }, + }); + expect(request.hasNext).toBe(false); + } + }); + + it.skip("Can use default value of initialCount", async () => { + const query = gql` + query { + scalarList @stream + } + `; + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + scalarList: () => ["apple", "banana", "coconut"], + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + scalarList: [], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + scalarList: ["apple", "banana", "coconut"], + }, + }); + expect(request.hasNext).toBe(false); + } + }); + + it.skip("Negative values of initialCount throw field errors", async () => { + // from a client perspective, a regular graphql query + }); + + it.skip("Returns label from stream directive", async () => { + // from a client 
perspective, a repeat of a previous test + }); + + it.skip("Can disable @stream using if argument", async () => { + // from a client perspective, a regular graphql query + }); + + it.skip("Does not disable stream with null if argument", async () => { + const query = gql` + query ($shouldStream: Boolean) { + scalarList @stream(initialCount: 2, if: $shouldStream) + } + `; + + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + scalarList: () => ["apple", "banana", "coconut"], + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + scalarList: ["apple", "banana"], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + scalarList: ["apple", "banana", "coconut"], + }, + }); + expect(request.hasNext).toBe(false); + } + }); + + it.skip("Can stream multi-dimensional lists", async () => { + const query = gql` + query { + scalarListList @stream(initialCount: 1) + } + `; + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + scalarListList: () => [ + ["apple", "apple", "apple"], + ["banana", "banana", "banana"], + ["coconut", "coconut", "coconut"], + ], + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + scalarListList: [["apple", "apple", "apple"]], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + scalarListList: [ + ["apple", "apple", "apple"], + ["banana", "banana", "banana"], + ["coconut", "coconut", "coconut"], + ], + }, + }); + expect(request.hasNext).toBe(false); + } + }); + + it.skip("Can stream a field that returns a list of promises", async () => { + const query = gql` + query { + friendList @stream(initialCount: 2) { + name + id + } + } + `; + + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + friendList: () => friends.map((f) => Promise.resolve(f)), + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [ + { + name: "Luke", + id: "1", + }, + { + name: "Han", + id: "2", + }, + ], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [ + { + name: "Luke", + id: "1", + }, + { 
+ name: "Han", + id: "2", + }, + { + name: "Leia", + id: "3", + }, + ], + }, + }); + expect(request.hasNext).toBe(false); + } + }); + + it.skip("Can stream in correct order with lists of promises", async () => { + const query = gql` + query { + friendList @stream(initialCount: 0) { + name + id + } + } + `; + + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + friendList: () => friends.map((f) => Promise.resolve(f)), + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [ + { + items: [{ name: "Luke", id: "1" }], + id: "0", + }, + ], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [ + { + items: [ + { name: "Luke", id: "1" }, + { name: "Han", id: "2" }, + ], + id: "0", + }, + ], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [ + { + items: [ + { name: "Luke", id: "1" }, + { name: "Han", id: "2" }, + { name: "Leia", id: "3" }, + ], + id: "0", + }, + ], + }, + }); + expect(request.hasNext).toBe(false); + } + }); + + it.skip("Does not execute early if not specified", async () => { + const query = gql` + query { + friendList @stream(initialCount: 0) { + id + } + } + `; + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + friendList: () => + friends.map((f, i) => ({ + id: async () => { + const slowness = 3 - i; + for (let j = 0; j < slowness; j++) { + await resolveOnNextTick(); + } + return f.id; + }, + })), + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [{ id: "1" }], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [{ id: "1" }, { id: "2" }], + }, + }); + 
expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [{ id: "1" }, { id: "2" }, { id: "3" }], + }, + }); + expect(request.hasNext).toBe(false); + } + }); + + it.skip("Executes early if specified", async () => { + const query = gql` + query { + friendList @stream(initialCount: 0) { + id + } + } + `; + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run( + query, + { + friendList: () => + friends.map((f, i) => ({ + id: async () => { + const slowness = 3 - i; + for (let j = 0; j < slowness; j++) { + await resolveOnNextTick(); + } + return f.id; + }, + })), + }, + true + ); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [{ id: "1" }, { id: "2" }, { id: "3" }], + }, + }); + expect(request.hasNext).toBe(false); + } + }); + + it.skip("Can stream a field that returns a list with nested promises", async () => { + const query = gql` + query { + friendList @stream(initialCount: 2) { + name + id + } + } + `; + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + friendList: () => + friends.map((f) => ({ + name: Promise.resolve(f.name), + id: Promise.resolve(f.id), + })), + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [ + { + name: "Luke", + id: "1", + }, + { + name: "Han", + id: "2", + }, + ], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [ + { + name: "Luke", + id: "1", + }, + { + name: "Han", + id: "2", + }, + { + name: "Leia", + id: "3", + }, + ], + }, + }); + expect(request.hasNext).toBe(false); + } + }); + + it.skip("Handles rejections in a field that returns a list of promises before initialCount is reached", async () => { + const query = gql` + query { + friendList @stream(initialCount: 2) { + name + id + } + } + `; + + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + friendList: () => + friends.map((f, i) => { + if (i === 1) { + return Promise.reject(new Error("bad")); + } + return Promise.resolve(f); + }), + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(false); + 
expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [{ name: "Luke", id: "1" }, null], + }, + errors: [ + { + message: "bad", + locations: [{ line: 3, column: 9 }], + path: ["friendList", 1], + }, + ], + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [ + { name: "Luke", id: "1" }, + null, + { name: "Leia", id: "3" }, + ], + }, + errors: [ + { + message: "bad", + locations: [{ line: 3, column: 9 }], + path: ["friendList", 1], + }, + ], + }); + expect(request.hasNext).toBe(false); + } + }); + + it.skip("Handles rejections in a field that returns a list of promises after initialCount is reached", async () => { + const query = gql` + query { + friendList @stream(initialCount: 1) { + name + id + } + } + `; + + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + friendList: () => + friends.map((f, i) => { + if (i === 1) { + return Promise.reject(new Error("bad")); + } + return Promise.resolve(f); + }), + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [{ name: "Luke", id: "1" }], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [{ name: "Luke", id: "1" }, null], + }, + errors: [ + { + message: "bad", + locations: [{ line: 3, column: 9 }], + path: ["friendList", 1], + }, + ], + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [ + { name: "Luke", id: "1" }, + null, + { name: "Leia", id: "3" }, + ], + }, + errors: [ + { + message: "bad", + locations: [{ line: 3, column: 9 }], + path: ["friendList", 1], + }, + ], + }); + expect(request.hasNext).toBe(false); + } + }); + + it.skip("Can stream a field that returns an async iterable", async () => { + const query = gql` + query { + friendList @stream { + name + id + } + } + `; + + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + async *friendList() { + yield await Promise.resolve(friends[0]); + yield await Promise.resolve(friends[1]); + yield await Promise.resolve(friends[2]); + }, + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + 
expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [{ name: "Luke", id: "1" }], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [ + { name: "Luke", id: "1" }, + { name: "Han", id: "2" }, + ], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [ + { name: "Luke", id: "1" }, + { name: "Han", id: "2" }, + { name: "Leia", id: "3" }, + ], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [ + { name: "Luke", id: "1" }, + { name: "Han", id: "2" }, + { name: "Leia", id: "3" }, + ], + }, + }); + expect(request.hasNext).toBe(false); + } + }); + + it.skip("Can stream a field that returns an async iterable, using a non-zero initialCount", async () => { + const query = gql` + query { + friendList @stream(initialCount: 2) { + name + id + } + } + `; + + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + async *friendList() { + yield await Promise.resolve(friends[0]); + yield await Promise.resolve(friends[1]); + yield await Promise.resolve(friends[2]); + }, + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [ + { name: "Luke", id: "1" }, + { name: "Han", id: "2" }, + ], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [ + { name: "Luke", id: "1" }, + { name: "Han", id: "2" }, + { name: "Leia", id: "3" }, + ], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [ + { name: "Luke", id: "1" }, + { name: "Han", id: "2" }, + { name: "Leia", id: "3" }, + ], + }, + }); + expect(request.hasNext).toBe(false); + } + }); + + it.skip("Negative values of initialCount throw field errors on a field that returns an async iterable", async () => { + // from a client perspective, a regular graphql query + }); + + it.skip("Does not execute early if not specified, when streaming from an async iterable", async () => { + const query = gql` + query { + friendList @stream(initialCount: 0) { + id + } + } + `; + + const slowFriend = 
async (n: number) => ({ + id: async () => { + const slowness = (3 - n) * 10; + for (let j = 0; j < slowness; j++) { + await resolveOnNextTick(); + } + return friends[n].id; + }, + }); + + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + async *friendList() { + yield await Promise.resolve(slowFriend(0)); + yield await Promise.resolve(slowFriend(1)); + yield await Promise.resolve(slowFriend(2)); + }, + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [{ id: "1" }], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [{ id: "1" }, { id: "2" }], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [{ id: "1" }, { id: "2" }, { id: "3" }], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [{ id: "1" }, { id: "2" }, { id: "3" }], + }, + }); + expect(request.hasNext).toBe(false); + } + }); + + it.skip("Executes early if specified when streaming from an async iterable", async () => { + const query = gql` + query { + friendList @stream(initialCount: 0) { + id + } + } + `; + const order: Array = []; + const slowFriend = (n: number) => ({ + id: async () => { + const slowness = (3 - n) * 10; + for (let j = 0; j < slowness; j++) { + await resolveOnNextTick(); + } + order.push(n); + return friends[n].id; + }, + }); + + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run( + query, + { + async *friendList() { + yield await Promise.resolve(slowFriend(0)); + yield await Promise.resolve(slowFriend(1)); + yield await Promise.resolve(slowFriend(2)); + }, + }, + true + ); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + 
friendList: [{ id: "1" }, { id: "2" }, { id: "3" }], + }, + }); + expect(request.hasNext).toBe(false); + } + }); + + it.skip("Can handle concurrent calls to .next() without waiting", async () => { + const query = gql(` + query { + friendList @stream(initialCount: 2) { + name + id + } + } + `); + + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + async *friendList() { + yield await Promise.resolve(friends[0]); + yield await Promise.resolve(friends[1]); + yield await Promise.resolve(friends[2]); + }, + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [ + { name: "Luke", id: "1" }, + { name: "Han", id: "2" }, + ], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [ + { name: "Luke", id: "1" }, + { name: "Han", id: "2" }, + { name: "Leia", id: "3" }, + ], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [ + { name: "Luke", id: "1" }, + { name: "Han", id: "2" }, + { name: "Leia", id: "3" }, + ], + }, + }); + expect(request.hasNext).toBe(false); + } + }); + + it.skip("Handles error thrown in async iterable before initialCount is reached", async () => { + // from a client perspective, a regular graphql query + }); + + it.skip("Handles error thrown in async iterable after initialCount is reached", async () => { + const query = gql` + query { + friendList @stream(initialCount: 1) { + name + id + } + } + `; + + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + async *friendList() { + yield await Promise.resolve(friends[0]); + throw new Error("bad"); + }, + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [{ name: "Luke", id: "1" }], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [{ name: "Luke", id: "1" }], + }, + errors: [ + { + message: "bad", + locations: [{ line: 3, column: 9 }], + path: ["friendList"], + }, + ], + }); + expect(request.hasNext).toBe(false); + } + }); + + it.skip("Handles null returned in non-null list items after initialCount is reached", async () => { + const query = gql` + query { + nonNullFriendList @stream(initialCount: 1) { + name + } + } + `; + + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + 
nonNullFriendList: () => [friends[0], null, friends[1]], + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + nonNullFriendList: [{ name: "Luke" }], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + nonNullFriendList: [{ name: "Luke", id: "1" }], + }, + errors: [ + { + message: + "Cannot return null for non-nullable field Query.nonNullFriendList.", + locations: [{ line: 3, column: 9 }], + path: ["nonNullFriendList", 1], + }, + ], + }); + expect(request.hasNext).toBe(false); + } + }); + + it.skip("Handles null returned in non-null async iterable list items after initialCount is reached", async () => { + // from a client perspective, a repeat of the last test + }); + + it.skip("Handles errors thrown by completeValue after initialCount is reached", async () => { + const query = gql` + query { + scalarList @stream(initialCount: 1) + } + `; + + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + scalarList: () => [friends[0].name, {}], + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + scalarList: ["Luke"], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + scalarList: ["Luke", null], + }, + errors: [ + { + message: "String cannot represent value: {}", + locations: [{ line: 3, column: 9 }], + path: ["scalarList", 1], + }, + ], + }); + expect(request.hasNext).toBe(false); + } + }); + + it.skip("Handles async errors thrown by completeValue after initialCount is reached", async () => { + const query = gql` + query { + friendList @stream(initialCount: 1) { + nonNullName + } + } + `; + + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + friendList: () => [ + Promise.resolve({ nonNullName: friends[0].name }), + Promise.resolve({ + nonNullName: () => Promise.reject(new Error("Oops")), + }), + Promise.resolve({ nonNullName: friends[1].name }), + ], + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [{ nonNullName: "Luke" }], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [{ nonNullName: "Luke" }, null], + }, + errors: [ + { + message: "Oops", + 
locations: [{ line: 4, column: 11 }], + path: ["friendList", 1, "nonNullName"], + }, + ], + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [{ nonNullName: "Luke" }, null, { nonNullName: "Han" }], + }, + errors: [ + { + message: "Oops", + locations: [{ line: 4, column: 11 }], + path: ["friendList", 1, "nonNullName"], + }, + ], + }); + expect(request.hasNext).toBe(false); + } + }); + + it.skip("Handles nested async errors thrown by completeValue after initialCount is reached", async () => { + // from a client perspective, a repeat of the last test + }); + + it.skip("Handles async errors thrown by completeValue after initialCount is reached for a non-nullable list", async () => { + const query = gql` + query { + nonNullFriendList @stream(initialCount: 1) { + nonNullName + } + } + `; + + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + nonNullFriendList: () => [ + Promise.resolve({ nonNullName: friends[0].name }), + Promise.resolve({ + nonNullName: () => Promise.reject(new Error("Oops")), + }), + Promise.resolve({ nonNullName: friends[1].name }), + ], + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + nonNullFriendList: [{ nonNullName: "Luke" }], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + nonNullFriendList: [{ nonNullName: "Luke" }], + }, + errors: [ + { + message: "Oops", + locations: [{ line: 4, column: 11 }], + path: ["nonNullFriendList", 1, "nonNullName"], + }, + ], + }); + expect(request.hasNext).toBe(false); + } + }); + + it.skip("Handles nested async errors thrown by completeValue after initialCount is reached for a non-nullable list", async () => { + // from a client perspective, a repeat of the last test + }); + + it.skip("Handles async errors thrown by completeValue after initialCount is reached from async iterable", async () => { + const query = gql` + query { + friendList @stream(initialCount: 1) { + nonNullName + } + } + `; + + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + async *friendList() { + yield await Promise.resolve({ nonNullName: friends[0].name }); + yield await Promise.resolve({ + nonNullName: () => Promise.reject(new Error("Oops")), + }); + yield await Promise.resolve({ nonNullName: friends[1].name }); + }, + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [{ nonNullName: "Luke" }], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + 
expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [{ nonNullName: "Luke" }, null], + }, + errors: [ + { + message: "Oops", + locations: [{ line: 4, column: 11 }], + path: ["friendList", 1, "nonNullName"], + }, + ], + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [{ nonNullName: "Luke" }, null, { name: "Han" }], + }, + errors: [ + { + message: "Oops", + locations: [{ line: 4, column: 11 }], + path: ["friendList", 1, "nonNullName"], + }, + ], + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [{ nonNullName: "Luke" }, null, { name: "Han" }], + }, + errors: [ + { + message: "Oops", + locations: [{ line: 4, column: 11 }], + path: ["friendList", 1, "nonNullName"], + }, + ], + }); + expect(request.hasNext).toBe(false); + } + }); + + it.skip("Handles async errors thrown by completeValue after initialCount is reached from async generator for a non-nullable list", async () => { + // from a client perspective, a repeat of a previous test + }); + + it.skip("Handles async errors thrown by completeValue after initialCount is reached from async iterable for a non-nullable list when the async iterable does not provide a return method) ", async () => { + // from a client perspective, a repeat of a previous test + }); + + it.skip("Handles async errors thrown by completeValue after initialCount is reached from async iterable for a non-nullable list when the async iterable provides concurrent next/return methods and has a slow return ", async () => { + // from a client perspective, a repeat of a previous test + }); + + it.skip("Filters payloads that are nulled", async () => { + // from a client perspective, a regular graphql query + }); + + it.skip("Filters payloads that are nulled by a later synchronous error", async () => { + // from a client perspective, a regular graphql query + }); + + it.skip("Does not filter payloads when null error is in a different path", async () => { + const query = gql` + query { + otherNestedObject: nestedObject { + ... 
@defer { + scalarField + } + } + nestedObject { + nestedFriendList @stream(initialCount: 0) { + name + } + } + } + `; + + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + nestedObject: { + scalarField: () => Promise.reject(new Error("Oops")), + async *nestedFriendList() { + yield await Promise.resolve(friends[0]); + }, + }, + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + otherNestedObject: {}, + nestedObject: { nestedFriendList: [] }, + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + otherNestedObject: { scalarField: null }, + nestedObject: { nestedFriendList: [{ name: "Luke" }] }, + }, + errors: [ + { + message: "Oops", + locations: [{ line: 5, column: 13 }], + path: ["otherNestedObject", "scalarField"], + }, + ], + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + otherNestedObject: { scalarField: null }, + nestedObject: { nestedFriendList: [{ name: "Luke" }] }, + }, + errors: [ + { + message: "Oops", + locations: [{ line: 5, column: 13 }], + path: ["otherNestedObject", "scalarField"], + }, + ], + }); + expect(request.hasNext).toBe(false); + } + }); + + it.skip("Filters stream payloads that are nulled in a deferred payload", async () => { + const query = gql` + query { + nestedObject { + ... 
@defer { + deeperNestedObject { + nonNullScalarField + deeperNestedFriendList @stream(initialCount: 0) { + name + } + } + } + } + } + `; + + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + nestedObject: { + deeperNestedObject: { + nonNullScalarField: () => Promise.resolve(null), + async *deeperNestedFriendList() { + yield await Promise.resolve(friends[0]); /* c8 ignore start */ + } /* c8 ignore stop */, + }, + }, + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + nestedObject: {}, + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + nestedObject: { + deeperNestedObject: null, + }, + }, + errors: [ + { + message: + "Cannot return null for non-nullable field DeeperNestedObject.nonNullScalarField.", + path: ["nestedObject", "deeperNestedObject", "nonNullScalarField"], + }, + ], + }); + expect(request.hasNext).toBe(false); + } + }); + + it.skip("Filters defer payloads that are nulled in a stream response", async () => { + const query = gql` + query { + friendList @stream(initialCount: 0) { + nonNullName + ... @defer { + name + } + } + } + `; + + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + async *friendList() { + yield await Promise.resolve({ + name: friends[0].name, + nonNullName: () => Promise.resolve(null), + }); + }, + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [null], + }, + errors: [ + { + message: + "Cannot return null for non-nullable field Friend.nonNullName.", + locations: [{ line: 4, column: 9 }], + path: ["friendList", 0, "nonNullName"], + }, + ], + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [null], + }, + errors: [ + { + message: + "Cannot return null for non-nullable field Friend.nonNullName.", + locations: [{ line: 4, column: 9 }], + path: ["friendList", 0, "nonNullName"], + }, + ], + }); + expect(request.hasNext).toBe(false); + } + }); + + it.skip("Returns iterator and ignores errors when stream payloads are filtered", async () => { + // from a client perspective, a repeat of a previous test + }); + + it.skip("Handles promises returned by completeValue after initialCount is reached", async () => { + const query = gql` + query { + friendList 
@stream(initialCount: 1) { + id + name + } + } + `; + + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + async *friendList() { + yield await Promise.resolve(friends[0]); + yield await Promise.resolve(friends[1]); + yield await Promise.resolve({ + id: friends[2].id, + name: () => Promise.resolve(friends[2].name), + }); + }, + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [{ id: "1", name: "Luke" }], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [ + { id: "1", name: "Luke" }, + { id: "2", name: "Han" }, + ], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [ + { id: "1", name: "Luke" }, + { id: "2", name: "Han" }, + { id: "3", name: "Leia" }, + ], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [ + { id: "1", name: "Luke" }, + { id: "2", name: "Han" }, + { id: "3", name: "Leia" }, + ], + }, + }); + expect(request.hasNext).toBe(false); + } + }); + + it.skip("Handles overlapping deferred and non-deferred streams", async () => { + const query = gql` + query { + nestedObject { + nestedFriendList @stream(initialCount: 0) { + id + } + } + nestedObject { + ... 
@defer { + nestedFriendList @stream(initialCount: 0) { + id + name + } + } + } + } + `; + + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + nestedObject: { + async *nestedFriendList() { + yield await Promise.resolve(friends[0]); + yield await Promise.resolve(friends[1]); + }, + }, + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + nestedObject: { + nestedFriendList: [], + }, + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + nestedObject: { + nestedFriendList: [{ id: "1", name: "Luke" }], + }, + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + nestedObject: { + nestedFriendList: [ + { id: "1", name: "Luke" }, + { id: "2", name: "Han" }, + ], + }, + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + nestedObject: { + nestedFriendList: [ + { id: "1", name: "Luke" }, + { id: "2", name: "Han" }, + ], + }, + }, + }); + expect(request.hasNext).toBe(false); + } + }); + + it.skip("Returns payloads in correct order when parent deferred fragment resolves slower than stream", async () => { + const { promise: slowFieldPromise, resolve: resolveSlowField } = + promiseWithResolvers(); + + const query = gql` + query { + nestedObject { + ...DeferFragment @defer + } + } + fragment DeferFragment on NestedObject { + scalarField + nestedFriendList @stream(initialCount: 0) { + name + } + } + `; + + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + nestedObject: { + scalarField: () => slowFieldPromise, + async *nestedFriendList() { + yield await Promise.resolve(friends[0]); + yield await Promise.resolve(friends[1]); + }, + }, + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + nestedObject: {}, + }, + }); + expect(request.hasNext).toBe(true); + } + + resolveSlowField("slow"); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + nestedObject: { + scalarField: "slow", + nestedFriendList: [], + }, + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + 
expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + nestedObject: { + scalarField: "slow", + nestedFriendList: [{ name: "Luke" }], + }, + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + nestedObject: { + scalarField: "slow", + nestedFriendList: [{ name: "Luke" }, { name: "Han" }], + }, + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + nestedObject: { + scalarField: "slow", + nestedFriendList: [{ name: "Luke" }, { name: "Han" }], + }, + }, + }); + expect(request.hasNext).toBe(false); + } + }); + + it.skip("Can @defer fields that are resolved after async iterable is complete", async () => { + const { promise: slowFieldPromise, resolve: resolveSlowField } = + promiseWithResolvers(); + const { + promise: iterableCompletionPromise, + resolve: resolveIterableCompletion, + } = promiseWithResolvers(); + + const query = gql` + query { + friendList @stream(label: "stream-label") { + ...NameFragment @defer(label: "DeferName") @defer(label: "DeferName") + id + } + } + fragment NameFragment on Friend { + name + } + `; + + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + async *friendList() { + yield await Promise.resolve(friends[0]); + yield await Promise.resolve({ + id: friends[1].id, + name: () => slowFieldPromise, + }); + await iterableCompletionPromise; + }, + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [], + }, + }); + expect(request.hasNext).toBe(true); + } + + resolveIterableCompletion(null); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [{ id: "1", name: "Luke" }], + }, + }); + expect(request.hasNext).toBe(true); + } + + resolveSlowField("Han"); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [{ id: "1", name: "Luke" }, { id: "2" }], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [{ id: "1", name: "Luke" }, { id: "2" }], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + 
expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [ + { id: "1", name: "Luke" }, + { id: "2", name: "Han" }, + ], + }, + }); + expect(request.hasNext).toBe(false); + } + }); + + it.skip("Can @defer fields that are resolved before async iterable is complete", async () => { + const { promise: slowFieldPromise, resolve: resolveSlowField } = + promiseWithResolvers(); + const { + promise: iterableCompletionPromise, + resolve: resolveIterableCompletion, + } = promiseWithResolvers(); + + const query = gql` + query { + friendList @stream(initialCount: 1, label: "stream-label") { + ...NameFragment @defer(label: "DeferName") @defer(label: "DeferName") + id + } + } + fragment NameFragment on Friend { + name + } + `; + + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + async *friendList() { + yield await Promise.resolve(friends[0]); + yield await Promise.resolve({ + id: friends[1].id, + name: () => slowFieldPromise, + }); + await iterableCompletionPromise; + }, + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(false); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [{ id: "1" }], + }, + }); + expect(request.hasNext).toBe(true); + } + + resolveSlowField("Han"); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [{ id: "1", name: "Luke" }], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [{ id: "1", name: "Luke" }, { id: "2" }], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [ + { id: "1", name: "Luke" }, + { id: "2", name: "Han" }, + ], + }, + }); + expect(request.hasNext).toBe(true); + } + + resolveIterableCompletion(null); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(hasIncrementalChunks(chunk)).toBe(true); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [ + { id: "1", name: "Luke" }, + { id: "2", name: "Han" }, + ], + }, + }); + expect(request.hasNext).toBe(false); + } + }); + + it.skip("Returns underlying async iterables when returned generator is returned", async () => { + // not interesting from a client perspective + }); + + it.skip("Can return async iterable when underlying iterable does not have a return method", async () => { + // not interesting from a client perspective + }); + + it.skip("Returns underlying async iterables when returned generator is thrown", async () => { + // not interesting from a client perspective + }); +}); From b674efafd9ada97c59b7458e33ec02391a0c6424 Mon Sep 17 
00:00:00 2001 From: Jerel Miller Date: Tue, 9 Sep 2025 11:38:50 -0600 Subject: [PATCH 100/254] First implementation of stream --- .../__tests__/graphql17Alpha9/stream.test.ts | 2 +- src/incremental/handlers/graphql17Alpha9.ts | 27 ++++++++++++++----- 2 files changed, 21 insertions(+), 8 deletions(-) diff --git a/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts b/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts index 2dda1d23da3..01205133714 100644 --- a/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts +++ b/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts @@ -140,7 +140,7 @@ describe("graphql-js test cases", () => { // These test cases mirror stream tests of the `graphql-js` v17.0.0-alpha.9 release: // https://github.com/graphql/graphql-js/blob/3283f8adf52e77a47f148ff2f30185c8d11ff0f0/src/execution/__tests__/stream-test.ts - it.skip("Can stream a list field", async () => { + it("Can stream a list field", async () => { const query = gql` query { scalarList @stream(initialCount: 1) diff --git a/src/incremental/handlers/graphql17Alpha9.ts b/src/incremental/handlers/graphql17Alpha9.ts index 7e4d59af29b..a38513b590b 100644 --- a/src/incremental/handlers/graphql17Alpha9.ts +++ b/src/incremental/handlers/graphql17Alpha9.ts @@ -102,21 +102,34 @@ class IncrementalRequest if (hasIncrementalChunks(chunk)) { for (const incremental of chunk.incremental) { - // TODO: Implement support for `@stream`. For now we will skip handling - // streamed responses - if ("items" in incremental) { - continue; - } - const pending = this.pending.find(({ id }) => incremental.id === id); + invariant( pending, "Could not find pending chunk for incremental value. Please file an issue for the Apollo Client team to investigate." ); - let { data } = incremental; const path = pending.path.concat(incremental.subPath ?? []); + if ("items" in incremental) { + const array = path.reduce((data, key) => { + // Use `&&` to maintain `null` if encountered + return data && data[key]; + }, this.data); + + invariant( + Array.isArray(array), + `@stream: value at path %o is not an array. 
Please file an issue for the Apollo Client team to investigate.`, + path + ); + + array.push(...(incremental.items as ReadonlyArray)); + + continue; + } + + let { data } = incremental; + for (let i = path.length - 1; i >= 0; i--) { const key = path[i]; const parent: Record = From dbd6eda76ee6ccdc1a3e17cd681cc987c16a3695 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 9 Sep 2025 11:41:15 -0600 Subject: [PATCH 101/254] Enable all tests --- .../__tests__/graphql17Alpha9/stream.test.ts | 58 +++++++++---------- 1 file changed, 29 insertions(+), 29 deletions(-) diff --git a/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts b/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts index 01205133714..8c6557e7664 100644 --- a/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts +++ b/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts @@ -183,7 +183,7 @@ describe("graphql-js test cases", () => { } }); - it.skip("Can use default value of initialCount", async () => { + it("Can use default value of initialCount", async () => { const query = gql` query { scalarList @stream @@ -237,7 +237,7 @@ describe("graphql-js test cases", () => { // from a client perspective, a regular graphql query }); - it.skip("Does not disable stream with null if argument", async () => { + it("Does not disable stream with null if argument", async () => { const query = gql` query ($shouldStream: Boolean) { scalarList @stream(initialCount: 2, if: $shouldStream) @@ -280,7 +280,7 @@ describe("graphql-js test cases", () => { } }); - it.skip("Can stream multi-dimensional lists", async () => { + it("Can stream multi-dimensional lists", async () => { const query = gql` query { scalarListList @stream(initialCount: 1) @@ -330,7 +330,7 @@ describe("graphql-js test cases", () => { } }); - it.skip("Can stream a field that returns a list of promises", async () => { + it("Can stream a field that returns a list of promises", async () => { const query = gql` query { friendList @stream(initialCount: 2) { @@ -398,7 +398,7 @@ describe("graphql-js test cases", () => { } }); - it.skip("Can stream in correct order with lists of promises", async () => { + it("Can stream in correct order with lists of promises", async () => { const query = gql` query { friendList @stream(initialCount: 0) { @@ -494,7 +494,7 @@ describe("graphql-js test cases", () => { } }); - it.skip("Does not execute early if not specified", async () => { + it("Does not execute early if not specified", async () => { const query = gql` query { friendList @stream(initialCount: 0) { @@ -575,7 +575,7 @@ describe("graphql-js test cases", () => { } }); - it.skip("Executes early if specified", async () => { + it("Executes early if specified", async () => { const query = gql` query { friendList @stream(initialCount: 0) { @@ -632,7 +632,7 @@ describe("graphql-js test cases", () => { } }); - it.skip("Can stream a field that returns a list with nested promises", async () => { + it("Can stream a field that returns a list with nested promises", async () => { const query = gql` query { friendList @stream(initialCount: 2) { @@ -703,7 +703,7 @@ describe("graphql-js test cases", () => { } }); - it.skip("Handles rejections in a field that returns a list of promises before initialCount is reached", async () => { + it("Handles rejections in a field that returns a list of promises before initialCount is reached", async () => { const query = gql` query { friendList @stream(initialCount: 2) { @@ -773,7 +773,7 @@ describe("graphql-js test cases", () 
=> { } }); - it.skip("Handles rejections in a field that returns a list of promises after initialCount is reached", async () => { + it("Handles rejections in a field that returns a list of promises after initialCount is reached", async () => { const query = gql` query { friendList @stream(initialCount: 1) { @@ -857,7 +857,7 @@ describe("graphql-js test cases", () => { } }); - it.skip("Can stream a field that returns an async iterable", async () => { + it("Can stream a field that returns an async iterable", async () => { const query = gql` query { friendList @stream { @@ -960,7 +960,7 @@ describe("graphql-js test cases", () => { } }); - it.skip("Can stream a field that returns an async iterable, using a non-zero initialCount", async () => { + it("Can stream a field that returns an async iterable, using a non-zero initialCount", async () => { const query = gql` query { friendList @stream(initialCount: 2) { @@ -1039,7 +1039,7 @@ describe("graphql-js test cases", () => { // from a client perspective, a regular graphql query }); - it.skip("Does not execute early if not specified, when streaming from an async iterable", async () => { + it("Does not execute early if not specified, when streaming from an async iterable", async () => { const query = gql` query { friendList @stream(initialCount: 0) { @@ -1140,7 +1140,7 @@ describe("graphql-js test cases", () => { } }); - it.skip("Executes early if specified when streaming from an async iterable", async () => { + it("Executes early if specified when streaming from an async iterable", async () => { const query = gql` query { friendList @stream(initialCount: 0) { @@ -1204,7 +1204,7 @@ describe("graphql-js test cases", () => { } }); - it.skip("Can handle concurrent calls to .next() without waiting", async () => { + it("Can handle concurrent calls to .next() without waiting", async () => { const query = gql(` query { friendList @stream(initialCount: 2) { @@ -1283,7 +1283,7 @@ describe("graphql-js test cases", () => { // from a client perspective, a regular graphql query }); - it.skip("Handles error thrown in async iterable after initialCount is reached", async () => { + it("Handles error thrown in async iterable after initialCount is reached", async () => { const query = gql` query { friendList @stream(initialCount: 1) { @@ -1339,7 +1339,7 @@ describe("graphql-js test cases", () => { } }); - it.skip("Handles null returned in non-null list items after initialCount is reached", async () => { + it("Handles null returned in non-null list items after initialCount is reached", async () => { const query = gql` query { nonNullFriendList @stream(initialCount: 1) { @@ -1396,7 +1396,7 @@ describe("graphql-js test cases", () => { // from a client perspective, a repeat of the last test }); - it.skip("Handles errors thrown by completeValue after initialCount is reached", async () => { + it("Handles errors thrown by completeValue after initialCount is reached", async () => { const query = gql` query { scalarList @stream(initialCount: 1) @@ -1446,7 +1446,7 @@ describe("graphql-js test cases", () => { } }); - it.skip("Handles async errors thrown by completeValue after initialCount is reached", async () => { + it("Handles async errors thrown by completeValue after initialCount is reached", async () => { const query = gql` query { friendList @stream(initialCount: 1) { @@ -1529,7 +1529,7 @@ describe("graphql-js test cases", () => { // from a client perspective, a repeat of the last test }); - it.skip("Handles async errors thrown by completeValue after initialCount is 
reached for a non-nullable list", async () => { + it("Handles async errors thrown by completeValue after initialCount is reached for a non-nullable list", async () => { const query = gql` query { nonNullFriendList @stream(initialCount: 1) { @@ -1591,7 +1591,7 @@ describe("graphql-js test cases", () => { // from a client perspective, a repeat of the last test }); - it.skip("Handles async errors thrown by completeValue after initialCount is reached from async iterable", async () => { + it("Handles async errors thrown by completeValue after initialCount is reached from async iterable", async () => { const query = gql` query { friendList @stream(initialCount: 1) { @@ -1711,7 +1711,7 @@ describe("graphql-js test cases", () => { // from a client perspective, a regular graphql query }); - it.skip("Does not filter payloads when null error is in a different path", async () => { + it("Does not filter payloads when null error is in a different path", async () => { const query = gql` query { otherNestedObject: nestedObject { @@ -1799,7 +1799,7 @@ describe("graphql-js test cases", () => { } }); - it.skip("Filters stream payloads that are nulled in a deferred payload", async () => { + it("Filters stream payloads that are nulled in a deferred payload", async () => { const query = gql` query { nestedObject { @@ -1867,7 +1867,7 @@ describe("graphql-js test cases", () => { } }); - it.skip("Filters defer payloads that are nulled in a stream response", async () => { + it("Filters defer payloads that are nulled in a stream response", async () => { const query = gql` query { friendList @stream(initialCount: 0) { @@ -1954,7 +1954,7 @@ describe("graphql-js test cases", () => { // from a client perspective, a repeat of a previous test }); - it.skip("Handles promises returned by completeValue after initialCount is reached", async () => { + it("Handles promises returned by completeValue after initialCount is reached", async () => { const query = gql` query { friendList @stream(initialCount: 1) { @@ -2046,7 +2046,7 @@ describe("graphql-js test cases", () => { } }); - it.skip("Handles overlapping deferred and non-deferred streams", async () => { + it("Handles overlapping deferred and non-deferred streams", async () => { const query = gql` query { nestedObject { @@ -2148,7 +2148,7 @@ describe("graphql-js test cases", () => { } }); - it.skip("Returns payloads in correct order when parent deferred fragment resolves slower than stream", async () => { + it("Returns payloads in correct order when parent deferred fragment resolves slower than stream", async () => { const { promise: slowFieldPromise, resolve: resolveSlowField } = promiseWithResolvers(); @@ -2264,7 +2264,7 @@ describe("graphql-js test cases", () => { } }); - it.skip("Can @defer fields that are resolved after async iterable is complete", async () => { + it("Can @defer fields that are resolved after async iterable is complete", async () => { const { promise: slowFieldPromise, resolve: resolveSlowField } = promiseWithResolvers(); const { @@ -2376,7 +2376,7 @@ describe("graphql-js test cases", () => { } }); - it.skip("Can @defer fields that are resolved before async iterable is complete", async () => { + it("Can @defer fields that are resolved before async iterable is complete", async () => { const { promise: slowFieldPromise, resolve: resolveSlowField } = promiseWithResolvers(); const { From b3f35f4fac5d91e7fb8afe0eceb6bbac472fdce5 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 9 Sep 2025 11:48:17 -0600 Subject: [PATCH 102/254] Fix some incorrect 
assertions on hasIncrementalChunks --- .../__tests__/graphql17Alpha9/stream.test.ts | 22 +++++++++---------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts b/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts index 8c6557e7664..f9a500e269b 100644 --- a/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts +++ b/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts @@ -1021,7 +1021,7 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [ @@ -1130,7 +1130,7 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [{ id: "1" }, { id: "2" }, { id: "3" }], @@ -1322,7 +1322,7 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [{ name: "Luke", id: "1" }], @@ -1374,7 +1374,7 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { nonNullFriendList: [{ name: "Luke", id: "1" }], @@ -1570,7 +1570,7 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { nonNullFriendList: [{ nonNullName: "Luke" }], @@ -1759,7 +1759,7 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); + expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { otherNestedObject: { scalarField: null }, @@ -2032,7 +2032,7 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [ @@ -2133,7 +2133,7 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { nestedObject: { @@ -2251,7 +2251,7 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { nestedObject: { @@ -2349,7 +2349,7 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - 
expect(hasIncrementalChunks(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [{ id: "1", name: "Luke" }, { id: "2" }], @@ -2478,7 +2478,7 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [ From 886be171a47644a432b69ad67036663e6e6d57b5 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 9 Sep 2025 11:50:06 -0600 Subject: [PATCH 103/254] Remove locations from errors in assertions --- .../__tests__/graphql17Alpha9/stream.test.ts | 17 ----------------- 1 file changed, 17 deletions(-) diff --git a/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts b/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts index f9a500e269b..1ae11c508d1 100644 --- a/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts +++ b/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts @@ -739,7 +739,6 @@ describe("graphql-js test cases", () => { errors: [ { message: "bad", - locations: [{ line: 3, column: 9 }], path: ["friendList", 1], }, ], @@ -764,7 +763,6 @@ describe("graphql-js test cases", () => { errors: [ { message: "bad", - locations: [{ line: 3, column: 9 }], path: ["friendList", 1], }, ], @@ -823,7 +821,6 @@ describe("graphql-js test cases", () => { errors: [ { message: "bad", - locations: [{ line: 3, column: 9 }], path: ["friendList", 1], }, ], @@ -848,7 +845,6 @@ describe("graphql-js test cases", () => { errors: [ { message: "bad", - locations: [{ line: 3, column: 9 }], path: ["friendList", 1], }, ], @@ -1330,7 +1326,6 @@ describe("graphql-js test cases", () => { errors: [ { message: "bad", - locations: [{ line: 3, column: 9 }], path: ["friendList"], }, ], @@ -1383,7 +1378,6 @@ describe("graphql-js test cases", () => { { message: "Cannot return null for non-nullable field Query.nonNullFriendList.", - locations: [{ line: 3, column: 9 }], path: ["nonNullFriendList", 1], }, ], @@ -1437,7 +1431,6 @@ describe("graphql-js test cases", () => { errors: [ { message: "String cannot represent value: {}", - locations: [{ line: 3, column: 9 }], path: ["scalarList", 1], }, ], @@ -1495,7 +1488,6 @@ describe("graphql-js test cases", () => { errors: [ { message: "Oops", - locations: [{ line: 4, column: 11 }], path: ["friendList", 1, "nonNullName"], }, ], @@ -1516,7 +1508,6 @@ describe("graphql-js test cases", () => { errors: [ { message: "Oops", - locations: [{ line: 4, column: 11 }], path: ["friendList", 1, "nonNullName"], }, ], @@ -1578,7 +1569,6 @@ describe("graphql-js test cases", () => { errors: [ { message: "Oops", - locations: [{ line: 4, column: 11 }], path: ["nonNullFriendList", 1, "nonNullName"], }, ], @@ -1640,7 +1630,6 @@ describe("graphql-js test cases", () => { errors: [ { message: "Oops", - locations: [{ line: 4, column: 11 }], path: ["friendList", 1, "nonNullName"], }, ], @@ -1661,7 +1650,6 @@ describe("graphql-js test cases", () => { errors: [ { message: "Oops", - locations: [{ line: 4, column: 11 }], path: ["friendList", 1, "nonNullName"], }, ], @@ -1682,7 +1670,6 @@ describe("graphql-js test cases", () => { errors: [ { message: "Oops", - locations: [{ line: 4, column: 11 }], path: ["friendList", 1, "nonNullName"], }, ], @@ -1768,7 +1755,6 @@ describe("graphql-js test cases", () => { errors: [ { message: "Oops", - 
locations: [{ line: 5, column: 13 }], path: ["otherNestedObject", "scalarField"], }, ], @@ -1790,7 +1776,6 @@ describe("graphql-js test cases", () => { errors: [ { message: "Oops", - locations: [{ line: 5, column: 13 }], path: ["otherNestedObject", "scalarField"], }, ], @@ -1919,7 +1904,6 @@ describe("graphql-js test cases", () => { { message: "Cannot return null for non-nullable field Friend.nonNullName.", - locations: [{ line: 4, column: 9 }], path: ["friendList", 0, "nonNullName"], }, ], @@ -1941,7 +1925,6 @@ describe("graphql-js test cases", () => { { message: "Cannot return null for non-nullable field Friend.nonNullName.", - locations: [{ line: 4, column: 9 }], path: ["friendList", 0, "nonNullName"], }, ], From 503951eee11d790e2a49de21a7ae922cfa10cb56 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 9 Sep 2025 13:12:31 -0600 Subject: [PATCH 104/254] Formatting --- src/incremental/handlers/graphql17Alpha9.ts | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/incremental/handlers/graphql17Alpha9.ts b/src/incremental/handlers/graphql17Alpha9.ts index a38513b590b..6914e823336 100644 --- a/src/incremental/handlers/graphql17Alpha9.ts +++ b/src/incremental/handlers/graphql17Alpha9.ts @@ -112,10 +112,11 @@ class IncrementalRequest const path = pending.path.concat(incremental.subPath ?? []); if ("items" in incremental) { - const array = path.reduce((data, key) => { + const array = path.reduce( // Use `&&` to maintain `null` if encountered - return data && data[key]; - }, this.data); + (data, key) => data && data[key], + this.data + ); invariant( Array.isArray(array), From 5f29b631bad76791fd0cb926c8f6e21cd09405ab Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 9 Sep 2025 13:12:37 -0600 Subject: [PATCH 105/254] Merge errors for streamed results --- src/incremental/handlers/graphql17Alpha9.ts | 46 ++++++++++++--------- 1 file changed, 26 insertions(+), 20 deletions(-) diff --git a/src/incremental/handlers/graphql17Alpha9.ts b/src/incremental/handlers/graphql17Alpha9.ts index 6914e823336..91b6d0a6fd7 100644 --- a/src/incremental/handlers/graphql17Alpha9.ts +++ b/src/incremental/handlers/graphql17Alpha9.ts @@ -126,27 +126,33 @@ class IncrementalRequest array.push(...(incremental.items as ReadonlyArray)); - continue; - } - - let { data } = incremental; - - for (let i = path.length - 1; i >= 0; i--) { - const key = path[i]; - const parent: Record = - typeof key === "number" ? [] : {}; - parent[key] = data; - data = parent as typeof data; + this.merge( + { + extensions: incremental.extensions, + errors: incremental.errors, + }, + new DeepMerger() + ); + } else { + let { data } = incremental; + + for (let i = path.length - 1; i >= 0; i--) { + const key = path[i]; + const parent: Record = + typeof key === "number" ? 
[] : {}; + parent[key] = data; + data = parent as typeof data; + } + + this.merge( + { + data: data as TData, + extensions: incremental.extensions, + errors: incremental.errors, + }, + new DeepMerger() + ); } - - this.merge( - { - data: data as TData, - extensions: incremental.extensions, - errors: incremental.errors, - }, - new DeepMerger() - ); } } From ad1276f9c6622ca74ff7bec8e07450fdf9191df4 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 9 Sep 2025 13:15:03 -0600 Subject: [PATCH 106/254] Fix incorrect assertions --- .../__tests__/graphql17Alpha9/stream.test.ts | 37 ++++++------------- 1 file changed, 11 insertions(+), 26 deletions(-) diff --git a/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts b/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts index 1ae11c508d1..5c546d1d71a 100644 --- a/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts +++ b/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts @@ -437,12 +437,7 @@ describe("graphql-js test cases", () => { expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { - friendList: [ - { - items: [{ name: "Luke", id: "1" }], - id: "0", - }, - ], + friendList: [{ name: "Luke", id: "1" }], }, }); expect(request.hasNext).toBe(true); @@ -457,13 +452,8 @@ describe("graphql-js test cases", () => { expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [ - { - items: [ - { name: "Luke", id: "1" }, - { name: "Han", id: "2" }, - ], - id: "0", - }, + { name: "Luke", id: "1" }, + { name: "Han", id: "2" }, ], }, }); @@ -479,14 +469,9 @@ describe("graphql-js test cases", () => { expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [ - { - items: [ - { name: "Luke", id: "1" }, - { name: "Han", id: "2" }, - { name: "Leia", id: "3" }, - ], - id: "0", - }, + { name: "Luke", id: "1" }, + { name: "Han", id: "2" }, + { name: "Leia", id: "3" }, ], }, }); @@ -1372,7 +1357,7 @@ describe("graphql-js test cases", () => { expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { - nonNullFriendList: [{ name: "Luke", id: "1" }], + nonNullFriendList: [{ name: "Luke" }], }, errors: [ { @@ -1645,7 +1630,7 @@ describe("graphql-js test cases", () => { expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { - friendList: [{ nonNullName: "Luke" }, null, { name: "Han" }], + friendList: [{ nonNullName: "Luke" }, null, { nonNullName: "Han" }], }, errors: [ { @@ -1662,10 +1647,10 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { - friendList: [{ nonNullName: "Luke" }, null, { name: "Han" }], + friendList: [{ nonNullName: "Luke" }, null, { nonNullName: "Han" }], }, errors: [ { @@ -1916,7 +1901,7 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); + expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [null], From 226a6efdca55d2f3b6198de0561346d2ad6b301c Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 9 Sep 2025 13:18:03 -0600 Subject: [PATCH 107/254] Remove assertions on 
hasIncrementalChunks --- .../__tests__/graphql17Alpha9/stream.test.ts | 96 ------------------- 1 file changed, 96 deletions(-) diff --git a/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts b/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts index 5c546d1d71a..56a519d258f 100644 --- a/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts +++ b/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts @@ -19,11 +19,6 @@ import { import { gql } from "@apollo/client"; import { GraphQL17Alpha9Handler } from "@apollo/client/incremental"; -import { - hasIncrementalChunks, - // eslint-disable-next-line local-rules/no-relative-imports -} from "../../graphql17Alpha9.js"; - // This is the test setup of the `graphql-js` v17.0.0-alpha.9 release: // https://github.com/graphql/graphql-js/blob/3283f8adf52e77a47f148ff2f30185c8d11ff0f0/src/execution/__tests__/stream-test.ts @@ -159,7 +154,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { scalarList: ["apple"], @@ -173,7 +167,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { scalarList: ["apple", "banana", "coconut"], @@ -201,7 +194,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { scalarList: [], @@ -215,7 +207,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { scalarList: ["apple", "banana", "coconut"], @@ -256,7 +247,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { scalarList: ["apple", "banana"], @@ -270,7 +260,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { scalarList: ["apple", "banana", "coconut"], @@ -302,7 +291,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { scalarListList: [["apple", "apple", "apple"]], @@ -316,7 +304,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { scalarListList: [ @@ -352,7 +339,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [ @@ -375,7 +361,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); 
expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [ @@ -420,7 +405,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [], @@ -434,7 +418,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [{ name: "Luke", id: "1" }], @@ -448,7 +431,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [ @@ -465,7 +447,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [ @@ -508,7 +489,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [], @@ -522,7 +502,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [{ id: "1" }], @@ -536,7 +515,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [{ id: "1" }, { id: "2" }], @@ -550,7 +528,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [{ id: "1" }, { id: "2" }, { id: "3" }], @@ -593,7 +570,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [], @@ -607,7 +583,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [{ id: "1" }, { id: "2" }, { id: "3" }], @@ -642,7 +617,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [ @@ -665,7 +639,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [ @@ -716,7 +689,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [{ 
name: "Luke", id: "1" }, null], @@ -736,7 +708,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [ @@ -784,7 +755,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [{ name: "Luke", id: "1" }], @@ -798,7 +768,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [{ name: "Luke", id: "1" }, null], @@ -818,7 +787,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [ @@ -864,7 +832,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [], @@ -878,7 +845,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [{ name: "Luke", id: "1" }], @@ -892,7 +858,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [ @@ -909,7 +874,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [ @@ -927,7 +891,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [ @@ -967,7 +930,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [ @@ -984,7 +946,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [ @@ -1002,7 +963,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [ @@ -1055,7 +1015,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [], @@ -1069,7 +1028,6 @@ describe("graphql-js test cases", () => { assert(!done); 
assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [{ id: "1" }], @@ -1083,7 +1041,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [{ id: "1" }, { id: "2" }], @@ -1097,7 +1054,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [{ id: "1" }, { id: "2" }, { id: "3" }], @@ -1111,7 +1067,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [{ id: "1" }, { id: "2" }, { id: "3" }], @@ -1161,7 +1116,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [], @@ -1175,7 +1129,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [{ id: "1" }, { id: "2" }, { id: "3" }], @@ -1211,7 +1164,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [ @@ -1228,7 +1180,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [ @@ -1246,7 +1197,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [ @@ -1289,7 +1239,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [{ name: "Luke", id: "1" }], @@ -1303,7 +1252,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [{ name: "Luke", id: "1" }], @@ -1340,7 +1288,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { nonNullFriendList: [{ name: "Luke" }], @@ -1354,7 +1301,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { nonNullFriendList: [{ name: "Luke" }], @@ -1394,7 +1340,6 @@ 
describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { scalarList: ["Luke"], @@ -1408,7 +1353,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { scalarList: ["Luke", null], @@ -1451,7 +1395,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [{ nonNullName: "Luke" }], @@ -1465,7 +1408,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [{ nonNullName: "Luke" }, null], @@ -1485,7 +1427,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [{ nonNullName: "Luke" }, null, { nonNullName: "Han" }], @@ -1532,7 +1473,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { nonNullFriendList: [{ nonNullName: "Luke" }], @@ -1546,7 +1486,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { nonNullFriendList: [{ nonNullName: "Luke" }], @@ -1593,7 +1532,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [{ nonNullName: "Luke" }], @@ -1607,7 +1545,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [{ nonNullName: "Luke" }, null], @@ -1627,7 +1564,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [{ nonNullName: "Luke" }, null, { nonNullName: "Han" }], @@ -1647,7 +1583,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [{ nonNullName: "Luke" }, null, { nonNullName: "Han" }], @@ -1716,7 +1651,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { otherNestedObject: {}, @@ -1731,7 +1665,6 @@ describe("graphql-js test cases", () => { assert(!done); 
assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { otherNestedObject: { scalarField: null }, @@ -1752,7 +1685,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { otherNestedObject: { scalarField: null }, @@ -1804,7 +1736,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { nestedObject: {}, @@ -1818,7 +1749,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { nestedObject: { @@ -1866,7 +1796,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [], @@ -1880,7 +1809,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [null], @@ -1901,7 +1829,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [null], @@ -1951,7 +1878,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [{ id: "1", name: "Luke" }], @@ -1965,7 +1891,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [ @@ -1982,7 +1907,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [ @@ -2000,7 +1924,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [ @@ -2050,7 +1973,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { nestedObject: { @@ -2066,7 +1988,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { nestedObject: { @@ -2082,7 +2003,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); 
expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { nestedObject: { @@ -2101,7 +2021,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { nestedObject: { @@ -2152,7 +2071,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { nestedObject: {}, @@ -2168,7 +2086,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { nestedObject: { @@ -2185,7 +2102,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { nestedObject: { @@ -2202,7 +2118,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { nestedObject: { @@ -2219,7 +2134,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { nestedObject: { @@ -2271,7 +2185,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [], @@ -2287,7 +2200,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [{ id: "1", name: "Luke" }], @@ -2303,7 +2215,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [{ id: "1", name: "Luke" }, { id: "2" }], @@ -2317,7 +2228,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [{ id: "1", name: "Luke" }, { id: "2" }], @@ -2331,7 +2241,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [ @@ -2383,7 +2292,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [{ id: "1" }], @@ -2399,7 +2307,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { 
friendList: [{ id: "1", name: "Luke" }], @@ -2413,7 +2320,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [{ id: "1", name: "Luke" }, { id: "2" }], @@ -2427,7 +2333,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [ @@ -2446,7 +2351,6 @@ describe("graphql-js test cases", () => { assert(!done); assert(handler.isIncrementalResult(chunk)); - expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { friendList: [ From 6b69a4e68cea5e0449ec9b7276e03f3cb86fb58b Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 9 Sep 2025 14:10:10 -0600 Subject: [PATCH 108/254] Add necessary changes to writeToStore to handle stream --- src/cache/inmemory/writeToStore.ts | 52 ++++++++++++++++++++++-------- 1 file changed, 38 insertions(+), 14 deletions(-) diff --git a/src/cache/inmemory/writeToStore.ts b/src/cache/inmemory/writeToStore.ts index b44b6eb02f6..761d6e54ca6 100644 --- a/src/cache/inmemory/writeToStore.ts +++ b/src/cache/inmemory/writeToStore.ts @@ -79,17 +79,18 @@ export interface WriteContext extends ReadMergeModifyContext { fieldNodeSet: Set; } >; - // Directive metadata for @client and @defer. We could use a bitfield for this + // Directive metadata for @client, @defer, and @stream. We could use a bitfield for this // information to save some space, and use that bitfield number as the keys in // the context.flavors Map. clientOnly: boolean; deferred: boolean; + streamed: boolean; flavors: Map; } type FlavorableWriteContext = Pick< WriteContext, - "clientOnly" | "deferred" | "flavors" + "clientOnly" | "deferred" | "streamed" | "flavors" >; // Since there are only four possible combinations of context.clientOnly and @@ -100,7 +101,8 @@ type FlavorableWriteContext = Pick< function getContextFlavor( context: TContext, clientOnly: TContext["clientOnly"], - deferred: TContext["deferred"] + deferred: TContext["deferred"], + streamed: TContext["streamed"] ): TContext { const key = `${clientOnly}${deferred}`; let flavored = context.flavors.get(key); @@ -108,12 +110,17 @@ function getContextFlavor( context.flavors.set( key, (flavored = - context.clientOnly === clientOnly && context.deferred === deferred ? + ( + context.clientOnly === clientOnly && + context.deferred === deferred && + context.streamed === streamed + ) ? context : { ...context, clientOnly, deferred, + streamed, }) ); } @@ -169,6 +176,7 @@ export class StoreWriter { incomingById: new Map(), clientOnly: false, deferred: false, + streamed: false, flavors: new Map(), }; @@ -352,7 +360,7 @@ export class StoreWriter { // Reset context.clientOnly and context.deferred to their default // values before processing nested selection sets. field.selectionSet ? 
- getContextFlavor(context, false, false) + getContextFlavor(context, false, false, false) : context, childTree ); @@ -395,6 +403,7 @@ export class StoreWriter { __DEV__ && !context.clientOnly && !context.deferred && + !context.streamed && !addTypenameToDocument.added(field) && // If the field has a read function, it may be a synthetic field or // provide a default value, so its absence from the written data should @@ -522,6 +531,7 @@ export class StoreWriter { WriteContext, | "clientOnly" | "deferred" + | "streamed" | "flavors" | "fragmentMap" | "lookupFragment" @@ -555,12 +565,13 @@ export class StoreWriter { ) { const visitedNode = limitingTrie.lookup( selectionSet, - // Because we take inheritedClientOnly and inheritedDeferred into + // Because we take inheritedClientOnly, inheritedDeferred, and inheritedStramed into // consideration here (in addition to selectionSet), it's possible for // the same selection set to be flattened more than once, if it appears // in the query with different @client and/or @directive configurations. inheritedContext.clientOnly, - inheritedContext.deferred + inheritedContext.deferred, + inheritedContext.streamed ); if (visitedNode.visited) return; visitedNode.visited = true; @@ -568,12 +579,12 @@ export class StoreWriter { selectionSet.selections.forEach((selection) => { if (!shouldInclude(selection, context.variables)) return; - let { clientOnly, deferred } = inheritedContext; + let { clientOnly, deferred, streamed } = inheritedContext; if ( - // Since the presence of @client or @defer on this field can only - // cause clientOnly or deferred to become true, we can skip the - // forEach loop if both clientOnly and deferred are already true. - !(clientOnly && deferred) && + // Since the presence of @client, @defer, or @stream on this field can only + // cause clientOnly, deferred, or streamed to become true, we can skip the + // forEach loop if clientOnly, deferred, and streamed are already true. + !(clientOnly && deferred && streamed) && isNonEmptyArray(selection.directives) ) { selection.directives.forEach((dir) => { @@ -591,6 +602,18 @@ export class StoreWriter { // TODO In the future, we may want to record args.label using // context.deferred, if a label is specified. } + if (name === "stream") { + const args = argumentsObjectFromField(dir, context.variables); + // The @stream directive takes an optional args.if boolean + // argument, similar to @include(if: boolean). Note that + // @stream(if: false) does not make context.deferred false, but + // instead behaves as if there was no @stream directive. + if (!args || (args as { if?: boolean }).if !== false) { + streamed = true; + } + // TODO In the future, we may want to record args.label using + // context.deferred, if a label is specified. + } }); } @@ -602,11 +625,12 @@ export class StoreWriter { // to true only if *all* paths have the directive (hence the &&). 
clientOnly = clientOnly && existing.clientOnly; deferred = deferred && existing.deferred; + streamed = streamed && existing.streamed; } fieldMap.set( selection, - getContextFlavor(context, clientOnly, deferred) + getContextFlavor(context, clientOnly, deferred, streamed) ); } else { const fragment = getFragmentFromSelection( @@ -632,7 +656,7 @@ export class StoreWriter { ) { flatten( fragment.selectionSet, - getContextFlavor(context, clientOnly, deferred) + getContextFlavor(context, clientOnly, deferred, streamed) ); } } From 8c864795df13d667d11bb02623d24fca50cc70cf Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 9 Sep 2025 15:12:02 -0600 Subject: [PATCH 109/254] Add tests for stream with the full client --- .../__tests__/graphql17Alpha9/stream.test.ts | 286 +++++++++++++++++- 1 file changed, 285 insertions(+), 1 deletion(-) diff --git a/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts b/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts index 56a519d258f..d3e63f7e327 100644 --- a/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts +++ b/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts @@ -15,9 +15,22 @@ import { GraphQLSchema, GraphQLString, } from "graphql-17-alpha9"; +import { from } from "rxjs"; -import { gql } from "@apollo/client"; +import { + ApolloClient, + ApolloLink, + CombinedGraphQLErrors, + gql, + InMemoryCache, + NetworkStatus, +} from "@apollo/client"; import { GraphQL17Alpha9Handler } from "@apollo/client/incremental"; +import { + markAsStreaming, + mockDeferStreamGraphQL17Alpha9, + ObservableStream, +} from "@apollo/client/testing/internal"; // This is the test setup of the `graphql-js` v17.0.0-alpha.9 release: // https://github.com/graphql/graphql-js/blob/3283f8adf52e77a47f148ff2f30185c8d11ff0f0/src/execution/__tests__/stream-test.ts @@ -131,6 +144,12 @@ async function* run( } } +function createSchemaLink(rootValue?: Record) { + return new ApolloLink((operation) => { + return from(run(operation.query, rootValue)); + }); +} + describe("graphql-js test cases", () => { // These test cases mirror stream tests of the `graphql-js` v17.0.0-alpha.9 release: // https://github.com/graphql/graphql-js/blob/3283f8adf52e77a47f148ff2f30185c8d11ff0f0/src/execution/__tests__/stream-test.ts @@ -2375,3 +2394,268 @@ describe("graphql-js test cases", () => { // not interesting from a client perspective }); }); + +test("GraphQL17Alpha9Handler can be used with `ApolloClient`", async () => { + const client = new ApolloClient({ + link: createSchemaLink({ friendList: friends }), + cache: new InMemoryCache(), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + const query = gql` + query FriendListQuery { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + const observableStream = new ObservableStream(client.watchQuery({ query })); + + await expect(observableStream).toEmitTypedValue({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + loading: true, + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + 
dataState: "complete", + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); +}); + +test("handles streamed scalar lists", async () => { + const client = new ApolloClient({ + link: createSchemaLink({ scalarList: ["apple", "banana", "orange"] }), + cache: new InMemoryCache(), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + const query = gql` + query ScalarListQuery { + scalarList @stream(initialCount: 1) + } + `; + + const observableStream = new ObservableStream(client.watchQuery({ query })); + + await expect(observableStream).toEmitTypedValue({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + loading: true, + data: markAsStreaming({ + scalarList: ["apple"], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + data: { + scalarList: ["apple", "banana", "orange"], + }, + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); +}); + +test("merges cache updates that happen concurrently", async () => { + const stream = mockDeferStreamGraphQL17Alpha9(); + const client = new ApolloClient({ + link: stream.httpLink, + cache: new InMemoryCache(), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + const query = gql` + query FriendListQuery { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + const observableStream = new ObservableStream(client.watchQuery({ query })); + + await expect(observableStream).toEmitTypedValue({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + partial: true, + }); + + stream.enqueueInitialChunk({ + data: { + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }, + pending: [{ id: "0", path: ["friendList"] }], + hasNext: true, + }); + + await expect(observableStream).toEmitTypedValue({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + client.cache.writeFragment({ + id: "Friend:1", + fragment: gql` + fragment FriendName on Friend { + name + } + `, + data: { + name: "Jedi", + }, + }); + + stream.enqueueSubsequentChunk({ + incremental: [ + { + items: [ + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ] as any, + id: "0", + }, + ], + completed: [{ id: "0" }], + hasNext: false, + }); + + await expect(observableStream).toEmitTypedValue({ + data: { + friendList: [ + { + __typename: "Friend", + id: "1", + name: "Jedi", // updated from cache + }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); +}); + +test("handles errors from items before initialCount is reached", async () => { + const client = new ApolloClient({ + link: createSchemaLink({ + friendList: () => + friends.map((friend, i) => { + if (i === 1) { + return Promise.reject(new Error("bad")); + } + + return Promise.resolve(friend); + }), + }), + cache: new InMemoryCache(), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + const query = gql` + query FriendListQuery { + friendList @stream(initialCount: 2) { + id + name + } + } + `; + + const 
observableStream = new ObservableStream( + client.watchQuery({ query, errorPolicy: "all" }) + ); + + await expect(observableStream).toEmitTypedValue({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }, null], + }), + error: new CombinedGraphQLErrors({ + data: { + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }, null], + }, + errors: [{ message: "bad", path: ["friendList", 1] }], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + null, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }), + error: new CombinedGraphQLErrors({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + null, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + errors: [{ message: "bad", path: ["friendList", 1] }], + }), + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.error, + partial: false, + }); + + await expect(observableStream).not.toEmitAnything(); +}); From f82c7851204162d11ae66143f2751fbd477a357a Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 9 Sep 2025 15:17:46 -0600 Subject: [PATCH 110/254] Move most stream tests to client.watchQuery folder --- .../streamGraphql17Alpha9.test.ts | 339 ++++++++++++++++++ .../__tests__/graphql17Alpha9/stream.test.ts | 216 +---------- 2 files changed, 341 insertions(+), 214 deletions(-) create mode 100644 src/core/__tests__/client.watchQuery/streamGraphql17Alpha9.test.ts diff --git a/src/core/__tests__/client.watchQuery/streamGraphql17Alpha9.test.ts b/src/core/__tests__/client.watchQuery/streamGraphql17Alpha9.test.ts new file mode 100644 index 00000000000..80eb5e1a382 --- /dev/null +++ b/src/core/__tests__/client.watchQuery/streamGraphql17Alpha9.test.ts @@ -0,0 +1,339 @@ +import type { + FormattedExecutionResult, + FormattedInitialIncrementalExecutionResult, + FormattedSubsequentIncrementalExecutionResult, +} from "graphql-17-alpha9"; +import { + experimentalExecuteIncrementally, + GraphQLID, + GraphQLList, + GraphQLNonNull, + GraphQLObjectType, + GraphQLSchema, + GraphQLString, +} from "graphql-17-alpha9"; +import { from } from "rxjs"; + +import type { DocumentNode } from "@apollo/client"; +import { + ApolloClient, + ApolloLink, + CombinedGraphQLErrors, + gql, + InMemoryCache, + NetworkStatus, +} from "@apollo/client"; +import { GraphQL17Alpha9Handler } from "@apollo/client/incremental"; +import { + markAsStreaming, + mockDeferStreamGraphQL17Alpha9, + ObservableStream, +} from "@apollo/client/testing/internal"; + +const friendType = new GraphQLObjectType({ + fields: { + id: { type: GraphQLID }, + name: { type: GraphQLString }, + nonNullName: { type: new GraphQLNonNull(GraphQLString) }, + }, + name: "Friend", +}); + +const friends = [ + { name: "Luke", id: 1 }, + { name: "Han", id: 2 }, + { name: "Leia", id: 3 }, +]; + +const query = new GraphQLObjectType({ + fields: { + scalarList: { + type: new GraphQLList(GraphQLString), + }, + scalarListList: { + type: new GraphQLList(new GraphQLList(GraphQLString)), + }, + friendList: { + type: new GraphQLList(friendType), + }, + nonNullFriendList: { + type: new GraphQLList(new GraphQLNonNull(friendType)), + }, + nestedObject: { + 
type: new GraphQLObjectType({ + name: "NestedObject", + fields: { + scalarField: { + type: GraphQLString, + }, + nonNullScalarField: { + type: new GraphQLNonNull(GraphQLString), + }, + nestedFriendList: { type: new GraphQLList(friendType) }, + deeperNestedObject: { + type: new GraphQLObjectType({ + name: "DeeperNestedObject", + fields: { + nonNullScalarField: { + type: new GraphQLNonNull(GraphQLString), + }, + deeperNestedFriendList: { type: new GraphQLList(friendType) }, + }, + }), + }, + }, + }), + }, + }, + name: "Query", +}); + +const schema = new GraphQLSchema({ query }); + +async function* run( + document: DocumentNode, + rootValue: unknown = {}, + enableEarlyExecution = false +): AsyncGenerator< + | FormattedInitialIncrementalExecutionResult + | FormattedSubsequentIncrementalExecutionResult + | FormattedExecutionResult, + void +> { + const result = await experimentalExecuteIncrementally({ + schema, + document, + rootValue, + enableEarlyExecution, + }); + + if ("initialResult" in result) { + yield JSON.parse(JSON.stringify(result.initialResult)); + + for await (const patch of result.subsequentResults) { + yield JSON.parse(JSON.stringify(patch)); + } + } else { + yield JSON.parse(JSON.stringify(result)); + } +} + +function createLink(rootValue?: Record) { + return new ApolloLink((operation) => { + return from(run(operation.query, rootValue)); + }); +} + +test("handles streamed scalar lists", async () => { + const client = new ApolloClient({ + link: createLink({ scalarList: ["apple", "banana", "orange"] }), + cache: new InMemoryCache(), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + const query = gql` + query ScalarListQuery { + scalarList @stream(initialCount: 1) + } + `; + + const observableStream = new ObservableStream(client.watchQuery({ query })); + + await expect(observableStream).toEmitTypedValue({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + loading: true, + data: markAsStreaming({ + scalarList: ["apple"], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + data: { + scalarList: ["apple", "banana", "orange"], + }, + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); +}); + +test("merges cache updates that happen concurrently", async () => { + const stream = mockDeferStreamGraphQL17Alpha9(); + const client = new ApolloClient({ + link: stream.httpLink, + cache: new InMemoryCache(), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + const query = gql` + query FriendListQuery { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + const observableStream = new ObservableStream(client.watchQuery({ query })); + + await expect(observableStream).toEmitTypedValue({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + partial: true, + }); + + stream.enqueueInitialChunk({ + data: { + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }, + pending: [{ id: "0", path: ["friendList"] }], + hasNext: true, + }); + + await expect(observableStream).toEmitTypedValue({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + client.cache.writeFragment({ + id: "Friend:1", 
+ fragment: gql` + fragment FriendName on Friend { + name + } + `, + data: { + name: "Jedi", + }, + }); + + stream.enqueueSubsequentChunk({ + incremental: [ + { + items: [ + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ] as any, + id: "0", + }, + ], + completed: [{ id: "0" }], + hasNext: false, + }); + + await expect(observableStream).toEmitTypedValue({ + data: { + friendList: [ + { + __typename: "Friend", + id: "1", + name: "Jedi", // updated from cache + }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); +}); + +test("handles errors from items before initialCount is reached", async () => { + const client = new ApolloClient({ + link: createLink({ + friendList: () => + friends.map((friend, i) => { + if (i === 1) { + return Promise.reject(new Error("bad")); + } + + return Promise.resolve(friend); + }), + }), + cache: new InMemoryCache(), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + const query = gql` + query FriendListQuery { + friendList @stream(initialCount: 2) { + id + name + } + } + `; + + const observableStream = new ObservableStream( + client.watchQuery({ query, errorPolicy: "all" }) + ); + + await expect(observableStream).toEmitTypedValue({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }, null], + }), + error: new CombinedGraphQLErrors({ + data: { + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }, null], + }, + errors: [{ message: "bad", path: ["friendList", 1] }], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + null, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }), + error: new CombinedGraphQLErrors({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + null, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + errors: [{ message: "bad", path: ["friendList", 1] }], + }), + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.error, + partial: false, + }); + + await expect(observableStream).not.toEmitAnything(); +}); diff --git a/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts b/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts index d3e63f7e327..64d5ca817ae 100644 --- a/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts +++ b/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts @@ -2395,6 +2395,8 @@ describe("graphql-js test cases", () => { }); }); +// quick smoke test. 
More exhaustive `@stream` tests can be found in +// src/core/__tests__/client.watchQuery/streamGraphQL17Alpha9.test.ts test("GraphQL17Alpha9Handler can be used with `ApolloClient`", async () => { const client = new ApolloClient({ link: createSchemaLink({ friendList: friends }), @@ -2445,217 +2447,3 @@ test("GraphQL17Alpha9Handler can be used with `ApolloClient`", async () => { partial: false, }); }); - -test("handles streamed scalar lists", async () => { - const client = new ApolloClient({ - link: createSchemaLink({ scalarList: ["apple", "banana", "orange"] }), - cache: new InMemoryCache(), - incrementalHandler: new GraphQL17Alpha9Handler(), - }); - - const query = gql` - query ScalarListQuery { - scalarList @stream(initialCount: 1) - } - `; - - const observableStream = new ObservableStream(client.watchQuery({ query })); - - await expect(observableStream).toEmitTypedValue({ - data: undefined, - dataState: "empty", - loading: true, - networkStatus: NetworkStatus.loading, - partial: true, - }); - - await expect(observableStream).toEmitTypedValue({ - loading: true, - data: markAsStreaming({ - scalarList: ["apple"], - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - partial: true, - }); - - await expect(observableStream).toEmitTypedValue({ - data: { - scalarList: ["apple", "banana", "orange"], - }, - dataState: "complete", - loading: false, - networkStatus: NetworkStatus.ready, - partial: false, - }); -}); - -test("merges cache updates that happen concurrently", async () => { - const stream = mockDeferStreamGraphQL17Alpha9(); - const client = new ApolloClient({ - link: stream.httpLink, - cache: new InMemoryCache(), - incrementalHandler: new GraphQL17Alpha9Handler(), - }); - - const query = gql` - query FriendListQuery { - friendList @stream(initialCount: 1) { - id - name - } - } - `; - - const observableStream = new ObservableStream(client.watchQuery({ query })); - - await expect(observableStream).toEmitTypedValue({ - data: undefined, - dataState: "empty", - loading: true, - networkStatus: NetworkStatus.loading, - partial: true, - }); - - stream.enqueueInitialChunk({ - data: { - friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], - }, - pending: [{ id: "0", path: ["friendList"] }], - hasNext: true, - }); - - await expect(observableStream).toEmitTypedValue({ - data: markAsStreaming({ - friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], - }), - dataState: "streaming", - loading: true, - networkStatus: NetworkStatus.streaming, - partial: true, - }); - - client.cache.writeFragment({ - id: "Friend:1", - fragment: gql` - fragment FriendName on Friend { - name - } - `, - data: { - name: "Jedi", - }, - }); - - stream.enqueueSubsequentChunk({ - incremental: [ - { - items: [ - { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3", name: "Leia" }, - ] as any, - id: "0", - }, - ], - completed: [{ id: "0" }], - hasNext: false, - }); - - await expect(observableStream).toEmitTypedValue({ - data: { - friendList: [ - { - __typename: "Friend", - id: "1", - name: "Jedi", // updated from cache - }, - { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3", name: "Leia" }, - ], - }, - dataState: "complete", - loading: false, - networkStatus: NetworkStatus.ready, - partial: false, - }); -}); - -test("handles errors from items before initialCount is reached", async () => { - const client = new ApolloClient({ - link: createSchemaLink({ - friendList: () => - friends.map((friend, i) => { - if (i === 1) { - return 
Promise.reject(new Error("bad")); - } - - return Promise.resolve(friend); - }), - }), - cache: new InMemoryCache(), - incrementalHandler: new GraphQL17Alpha9Handler(), - }); - - const query = gql` - query FriendListQuery { - friendList @stream(initialCount: 2) { - id - name - } - } - `; - - const observableStream = new ObservableStream( - client.watchQuery({ query, errorPolicy: "all" }) - ); - - await expect(observableStream).toEmitTypedValue({ - data: undefined, - dataState: "empty", - loading: true, - networkStatus: NetworkStatus.loading, - partial: true, - }); - - await expect(observableStream).toEmitTypedValue({ - data: markAsStreaming({ - friendList: [{ __typename: "Friend", id: "1", name: "Luke" }, null], - }), - error: new CombinedGraphQLErrors({ - data: { - friendList: [{ __typename: "Friend", id: "1", name: "Luke" }, null], - }, - errors: [{ message: "bad", path: ["friendList", 1] }], - }), - dataState: "streaming", - loading: true, - networkStatus: NetworkStatus.streaming, - partial: true, - }); - - await expect(observableStream).toEmitTypedValue({ - data: markAsStreaming({ - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - null, - { __typename: "Friend", id: "3", name: "Leia" }, - ], - }), - error: new CombinedGraphQLErrors({ - data: { - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - null, - { __typename: "Friend", id: "3", name: "Leia" }, - ], - }, - errors: [{ message: "bad", path: ["friendList", 1] }], - }), - dataState: "complete", - loading: false, - networkStatus: NetworkStatus.error, - partial: false, - }); - - await expect(observableStream).not.toEmitAnything(); -}); From 51b0bc3780123dab8ddc2228edf819353e5423c9 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 9 Sep 2025 15:21:58 -0600 Subject: [PATCH 111/254] Fix issue with frozen arrays --- src/incremental/handlers/graphql17Alpha9.ts | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/src/incremental/handlers/graphql17Alpha9.ts b/src/incremental/handlers/graphql17Alpha9.ts index 91b6d0a6fd7..b3638675b8b 100644 --- a/src/incremental/handlers/graphql17Alpha9.ts +++ b/src/incremental/handlers/graphql17Alpha9.ts @@ -112,22 +112,32 @@ class IncrementalRequest const path = pending.path.concat(incremental.subPath ?? []); if ("items" in incremental) { - const array = path.reduce( + let data = path.reduce( // Use `&&` to maintain `null` if encountered (data, key) => data && data[key], this.data ); invariant( - Array.isArray(array), + Array.isArray(data), `@stream: value at path %o is not an array. Please file an issue for the Apollo Client team to investigate.`, path ); - array.push(...(incremental.items as ReadonlyArray)); + if (data) { + for (let i = path.length - 1; i >= 0; i--) { + const key = path[i]; + const parent: Record = + typeof key === "number" ? [] : {}; + parent[key] = + i === path.length - 1 ? 
data.concat(incremental.items) : data; + data = parent as typeof data; + } + } this.merge( { + data, extensions: incremental.extensions, errors: incremental.errors, }, From ab5b085521a27a3c5e8ae7768ff755b788a6d0c3 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 9 Sep 2025 15:23:13 -0600 Subject: [PATCH 112/254] Simplify --- src/incremental/handlers/graphql17Alpha9.ts | 72 ++++++--------------- 1 file changed, 21 insertions(+), 51 deletions(-) diff --git a/src/incremental/handlers/graphql17Alpha9.ts b/src/incremental/handlers/graphql17Alpha9.ts index b3638675b8b..8062549bf15 100644 --- a/src/incremental/handlers/graphql17Alpha9.ts +++ b/src/incremental/handlers/graphql17Alpha9.ts @@ -110,59 +110,29 @@ class IncrementalRequest ); const path = pending.path.concat(incremental.subPath ?? []); - - if ("items" in incremental) { - let data = path.reduce( - // Use `&&` to maintain `null` if encountered - (data, key) => data && data[key], - this.data - ); - - invariant( - Array.isArray(data), - `@stream: value at path %o is not an array. Please file an issue for the Apollo Client team to investigate.`, + let data = + "items" in incremental ? path - ); - - if (data) { - for (let i = path.length - 1; i >= 0; i--) { - const key = path[i]; - const parent: Record = - typeof key === "number" ? [] : {}; - parent[key] = - i === path.length - 1 ? data.concat(incremental.items) : data; - data = parent as typeof data; - } - } - - this.merge( - { - data, - extensions: incremental.extensions, - errors: incremental.errors, - }, - new DeepMerger() - ); - } else { - let { data } = incremental; - - for (let i = path.length - 1; i >= 0; i--) { - const key = path[i]; - const parent: Record = - typeof key === "number" ? [] : {}; - parent[key] = data; - data = parent as typeof data; - } - - this.merge( - { - data: data as TData, - extensions: incremental.extensions, - errors: incremental.errors, - }, - new DeepMerger() - ); + .reduce((data, key) => data[key], this.data) + .concat(incremental.items) + : incremental.data; + + for (let i = path.length - 1; i >= 0; i--) { + const key = path[i]; + const parent: Record = + typeof key === "number" ? [] : {}; + parent[key] = data; + data = parent as typeof data; } + + this.merge( + { + data, + extensions: incremental.extensions, + errors: incremental.errors, + }, + new DeepMerger() + ); } } From 2dda3d1ad26a8a1aa9ea3ce9d63d63343943a634 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 9 Sep 2025 15:34:37 -0600 Subject: [PATCH 113/254] Revert "Add necessary changes to writeToStore to handle stream" This reverts commit dba0d09b0f2758fe7628e151c5f8f74399bcb200. --- src/cache/inmemory/writeToStore.ts | 52 ++++++++---------------------- 1 file changed, 14 insertions(+), 38 deletions(-) diff --git a/src/cache/inmemory/writeToStore.ts b/src/cache/inmemory/writeToStore.ts index 761d6e54ca6..b44b6eb02f6 100644 --- a/src/cache/inmemory/writeToStore.ts +++ b/src/cache/inmemory/writeToStore.ts @@ -79,18 +79,17 @@ export interface WriteContext extends ReadMergeModifyContext { fieldNodeSet: Set; } >; - // Directive metadata for @client, @defer, and @stream. We could use a bitfield for this + // Directive metadata for @client and @defer. We could use a bitfield for this // information to save some space, and use that bitfield number as the keys in // the context.flavors Map. 
clientOnly: boolean; deferred: boolean; - streamed: boolean; flavors: Map; } type FlavorableWriteContext = Pick< WriteContext, - "clientOnly" | "deferred" | "streamed" | "flavors" + "clientOnly" | "deferred" | "flavors" >; // Since there are only four possible combinations of context.clientOnly and @@ -101,8 +100,7 @@ type FlavorableWriteContext = Pick< function getContextFlavor( context: TContext, clientOnly: TContext["clientOnly"], - deferred: TContext["deferred"], - streamed: TContext["streamed"] + deferred: TContext["deferred"] ): TContext { const key = `${clientOnly}${deferred}`; let flavored = context.flavors.get(key); @@ -110,17 +108,12 @@ function getContextFlavor( context.flavors.set( key, (flavored = - ( - context.clientOnly === clientOnly && - context.deferred === deferred && - context.streamed === streamed - ) ? + context.clientOnly === clientOnly && context.deferred === deferred ? context : { ...context, clientOnly, deferred, - streamed, }) ); } @@ -176,7 +169,6 @@ export class StoreWriter { incomingById: new Map(), clientOnly: false, deferred: false, - streamed: false, flavors: new Map(), }; @@ -360,7 +352,7 @@ export class StoreWriter { // Reset context.clientOnly and context.deferred to their default // values before processing nested selection sets. field.selectionSet ? - getContextFlavor(context, false, false, false) + getContextFlavor(context, false, false) : context, childTree ); @@ -403,7 +395,6 @@ export class StoreWriter { __DEV__ && !context.clientOnly && !context.deferred && - !context.streamed && !addTypenameToDocument.added(field) && // If the field has a read function, it may be a synthetic field or // provide a default value, so its absence from the written data should @@ -531,7 +522,6 @@ export class StoreWriter { WriteContext, | "clientOnly" | "deferred" - | "streamed" | "flavors" | "fragmentMap" | "lookupFragment" @@ -565,13 +555,12 @@ export class StoreWriter { ) { const visitedNode = limitingTrie.lookup( selectionSet, - // Because we take inheritedClientOnly, inheritedDeferred, and inheritedStramed into + // Because we take inheritedClientOnly and inheritedDeferred into // consideration here (in addition to selectionSet), it's possible for // the same selection set to be flattened more than once, if it appears // in the query with different @client and/or @directive configurations. inheritedContext.clientOnly, - inheritedContext.deferred, - inheritedContext.streamed + inheritedContext.deferred ); if (visitedNode.visited) return; visitedNode.visited = true; @@ -579,12 +568,12 @@ export class StoreWriter { selectionSet.selections.forEach((selection) => { if (!shouldInclude(selection, context.variables)) return; - let { clientOnly, deferred, streamed } = inheritedContext; + let { clientOnly, deferred } = inheritedContext; if ( - // Since the presence of @client, @defer, or @stream on this field can only - // cause clientOnly, deferred, or streamed to become true, we can skip the - // forEach loop if clientOnly, deferred, and streamed are already true. - !(clientOnly && deferred && streamed) && + // Since the presence of @client or @defer on this field can only + // cause clientOnly or deferred to become true, we can skip the + // forEach loop if both clientOnly and deferred are already true. + !(clientOnly && deferred) && isNonEmptyArray(selection.directives) ) { selection.directives.forEach((dir) => { @@ -602,18 +591,6 @@ export class StoreWriter { // TODO In the future, we may want to record args.label using // context.deferred, if a label is specified. 
} - if (name === "stream") { - const args = argumentsObjectFromField(dir, context.variables); - // The @stream directive takes an optional args.if boolean - // argument, similar to @include(if: boolean). Note that - // @stream(if: false) does not make context.deferred false, but - // instead behaves as if there was no @stream directive. - if (!args || (args as { if?: boolean }).if !== false) { - streamed = true; - } - // TODO In the future, we may want to record args.label using - // context.deferred, if a label is specified. - } }); } @@ -625,12 +602,11 @@ export class StoreWriter { // to true only if *all* paths have the directive (hence the &&). clientOnly = clientOnly && existing.clientOnly; deferred = deferred && existing.deferred; - streamed = streamed && existing.streamed; } fieldMap.set( selection, - getContextFlavor(context, clientOnly, deferred, streamed) + getContextFlavor(context, clientOnly, deferred) ); } else { const fragment = getFragmentFromSelection( @@ -656,7 +632,7 @@ export class StoreWriter { ) { flatten( fragment.selectionSet, - getContextFlavor(context, clientOnly, deferred, streamed) + getContextFlavor(context, clientOnly, deferred) ); } } From 292b5b3632ae3ab00b5bd3e8c5bca5d884e15df5 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 9 Sep 2025 15:45:44 -0600 Subject: [PATCH 114/254] Don't add stream directive to field name --- src/utilities/internal/storeKeyNameFromField.ts | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/utilities/internal/storeKeyNameFromField.ts b/src/utilities/internal/storeKeyNameFromField.ts index e522cbfc832..cc7496ec9bb 100644 --- a/src/utilities/internal/storeKeyNameFromField.ts +++ b/src/utilities/internal/storeKeyNameFromField.ts @@ -12,6 +12,8 @@ export function storeKeyNameFromField( if (field.directives) { directivesObj = {}; field.directives.forEach((directive) => { + if (directive.name.value === "stream") return; + directivesObj[directive.name.value] = {}; if (directive.arguments) { From 17b0bafc2a0d480caaf20d4a30a344fdb144afe0 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 9 Sep 2025 15:58:49 -0600 Subject: [PATCH 115/254] Add test for writing stream field to cache --- src/cache/inmemory/__tests__/cache.ts | 43 +++++++++++++++++++++++++++ 1 file changed, 43 insertions(+) diff --git a/src/cache/inmemory/__tests__/cache.ts b/src/cache/inmemory/__tests__/cache.ts index 2a16c513bc5..26eedf301bf 100644 --- a/src/cache/inmemory/__tests__/cache.ts +++ b/src/cache/inmemory/__tests__/cache.ts @@ -1042,6 +1042,49 @@ describe("Cache", () => { }); } ); + + it("does not write @stream directive as part of the cache key", () => { + const cache = new InMemoryCache(); + + cache.writeQuery({ + data: { + list: [{ __typename: "Item", id: "1", value: 1 }], + }, + query: gql` + query { + list @stream(initialCount: 1) { + id + value + } + } + `, + }); + + expect(cache.extract()).toStrictEqualTyped({ + ROOT_QUERY: { + __typename: "Query", + list: [{ __ref: "Item:1" }], + }, + "Item:1": { __typename: "Item", id: "1", value: 1 }, + }); + + // We should be able to read the list without the `@stream` directive and + // get back results + expect( + cache.readQuery({ + query: gql` + query { + list { + id + value + } + } + `, + }) + ).toStrictEqualTyped({ + list: [{ __typename: "Item", id: "1", value: 1 }], + }); + }); }); describe("writeFragment", () => { From 64ed794245b40974556b5341000b08096d474dd7 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 9 Sep 2025 16:01:19 -0600 Subject: [PATCH 116/254] Add stream to known directives --- 
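Note: with "stream" on the KNOWN_DIRECTIVES list, the directive should no longer leak into the store field name, so a selection written as `list @stream(initialCount: 1)` and a plain `list` selection read from the same cache field (this is what the cache test added in the previous commit asserts). The sketch below is illustrative only and is not the actual getStoreKeyName implementation; `storeFieldNameSketch`, its directive-map argument, and the abbreviated directive list are assumptions made for this example.

const KNOWN_DIRECTIVES: string[] = [
  // abbreviated; the real list contains additional client-side directives
  "rest",
  "export",
  "nonreactive",
  "stream",
];

function storeFieldNameSketch(
  fieldName: string,
  directives: Record<string, Record<string, unknown> | null> = {}
): string {
  let key = fieldName;
  for (const name of Object.keys(directives)) {
    // In this sketch, known directives are skipped and never become part of the key.
    if (KNOWN_DIRECTIVES.includes(name)) continue;
    const args = directives[name];
    key += args ? `@${name}(${JSON.stringify(args)})` : `@${name}`;
  }
  return key;
}

// `@stream` is ignored, so both selections map onto the same cache field:
storeFieldNameSketch("list", { stream: { initialCount: 1 } }); // => "list"
storeFieldNameSketch("list"); // => "list"
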
src/utilities/internal/getStoreKeyName.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/src/utilities/internal/getStoreKeyName.ts b/src/utilities/internal/getStoreKeyName.ts index e8056c246af..0b63ebcce30 100644 --- a/src/utilities/internal/getStoreKeyName.ts +++ b/src/utilities/internal/getStoreKeyName.ts @@ -14,6 +14,7 @@ const KNOWN_DIRECTIVES: string[] = [ "rest", "export", "nonreactive", + "stream", ]; // Default stable JSON.stringify implementation used by getStoreKeyName. Can be From 0bea917cea4cba1607d41b242351cb76ba9a8bb9 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 9 Sep 2025 16:02:09 -0600 Subject: [PATCH 117/254] Remove check in storeKeyNameFromField --- src/utilities/internal/storeKeyNameFromField.ts | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/utilities/internal/storeKeyNameFromField.ts b/src/utilities/internal/storeKeyNameFromField.ts index cc7496ec9bb..e522cbfc832 100644 --- a/src/utilities/internal/storeKeyNameFromField.ts +++ b/src/utilities/internal/storeKeyNameFromField.ts @@ -12,8 +12,6 @@ export function storeKeyNameFromField( if (field.directives) { directivesObj = {}; field.directives.forEach((directive) => { - if (directive.name.value === "stream") return; - directivesObj[directive.name.value] = {}; if (directive.arguments) { From 7853aa8c54f2e200d202832189de1ec793cda9de Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 9 Sep 2025 16:03:50 -0600 Subject: [PATCH 118/254] Remove unused imports --- .../handlers/__tests__/graphql17Alpha9/stream.test.ts | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts b/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts index 64d5ca817ae..de6ea29806a 100644 --- a/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts +++ b/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts @@ -20,7 +20,6 @@ import { from } from "rxjs"; import { ApolloClient, ApolloLink, - CombinedGraphQLErrors, gql, InMemoryCache, NetworkStatus, @@ -28,7 +27,6 @@ import { import { GraphQL17Alpha9Handler } from "@apollo/client/incremental"; import { markAsStreaming, - mockDeferStreamGraphQL17Alpha9, ObservableStream, } from "@apollo/client/testing/internal"; From 240bf89845fcaeb64b5f701bc98f4996943cc998 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 9 Sep 2025 16:12:58 -0600 Subject: [PATCH 119/254] Add more tests for different scenarios --- .../streamGraphql17Alpha9.test.ts | 240 ++++++++++++++++++ 1 file changed, 240 insertions(+) diff --git a/src/core/__tests__/client.watchQuery/streamGraphql17Alpha9.test.ts b/src/core/__tests__/client.watchQuery/streamGraphql17Alpha9.test.ts index 80eb5e1a382..7d0872fcf2f 100644 --- a/src/core/__tests__/client.watchQuery/streamGraphql17Alpha9.test.ts +++ b/src/core/__tests__/client.watchQuery/streamGraphql17Alpha9.test.ts @@ -168,6 +168,60 @@ test("handles streamed scalar lists", async () => { }); }); +test("handles streamed multi-dimensional lists", async () => { + const client = new ApolloClient({ + link: createLink({ + scalarListList: [ + ["apple", "apple", "apple"], + ["banana", "banana", "banana"], + ["coconut", "coconut", "coconut"], + ], + }), + cache: new InMemoryCache(), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + const query = gql` + query ScalarListQuery { + scalarListList @stream(initialCount: 1) + } + `; + + const observableStream = new ObservableStream(client.watchQuery({ query })); + + await expect(observableStream).toEmitTypedValue({ + data: undefined, + 
dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + loading: true, + data: markAsStreaming({ + scalarListList: [["apple", "apple", "apple"]], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + data: { + scalarListList: [ + ["apple", "apple", "apple"], + ["banana", "banana", "banana"], + ["coconut", "coconut", "coconut"], + ], + }, + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); +}); + test("merges cache updates that happen concurrently", async () => { const stream = mockDeferStreamGraphQL17Alpha9(); const client = new ApolloClient({ @@ -337,3 +391,189 @@ test("handles errors from items before initialCount is reached", async () => { await expect(observableStream).not.toEmitAnything(); }); + +test("handles errors from items after initialCount is reached", async () => { + const client = new ApolloClient({ + link: createLink({ + friendList: () => + friends.map((friend, i) => { + if (i === 1) { + return Promise.reject(new Error("bad")); + } + + return Promise.resolve(friend); + }), + }), + cache: new InMemoryCache(), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + const query = gql` + query FriendListQuery { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + const observableStream = new ObservableStream( + client.watchQuery({ query, errorPolicy: "all" }) + ); + + await expect(observableStream).toEmitTypedValue({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }, null], + }), + error: new CombinedGraphQLErrors({ + data: { + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }, null], + }, + errors: [{ message: "bad", path: ["friendList", 1] }], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + null, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }), + error: new CombinedGraphQLErrors({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + null, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + errors: [{ message: "bad", path: ["friendList", 1] }], + }), + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.error, + partial: false, + }); + + await expect(observableStream).not.toEmitAnything(); +}); + +test("handles final chunk without incremental value", async () => { + const client = new ApolloClient({ + link: createLink({ + async *friendList() { + yield await Promise.resolve(friends[0]); + yield await Promise.resolve(friends[1]); + yield await Promise.resolve(friends[2]); + }, + }), + cache: new InMemoryCache(), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + const query = gql` + query FriendListQuery { + friendList @stream { + id + name + } + 
} + `; + + const observableStream = new ObservableStream(client.watchQuery({ query })); + + await expect(observableStream).toEmitTypedValue({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + data: markAsStreaming({ + friendList: [], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + ], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }), + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); + + await expect(observableStream).not.toEmitAnything(); +}); From 91e7e3c86652e174a6d7c6f53aece9258175dfe6 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 9 Sep 2025 16:18:56 -0600 Subject: [PATCH 120/254] Rerun api report --- .api-reports/api-report-utilities_internal.api.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.api-reports/api-report-utilities_internal.api.md b/.api-reports/api-report-utilities_internal.api.md index f4329853077..d1055bf60ca 100644 --- a/.api-reports/api-report-utilities_internal.api.md +++ b/.api-reports/api-report-utilities_internal.api.md @@ -449,7 +449,7 @@ export type VariablesOption = {} extends // Warnings were encountered during analysis: // -// src/utilities/internal/getStoreKeyName.ts:88:1 - (ae-forgotten-export) The symbol "storeKeyNameStringify" needs to be exported by the entry point index.d.ts +// src/utilities/internal/getStoreKeyName.ts:89:1 - (ae-forgotten-export) The symbol "storeKeyNameStringify" needs to be exported by the entry point index.d.ts // (No @packageDocumentation comment for this package) From d9d657e912374e604bd1e82dd9d34d8b7b1dc66d Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 9 Sep 2025 16:23:17 -0600 Subject: [PATCH 121/254] Formatting --- .../__tests__/client.watchQuery/streamGraphql17Alpha9.test.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/core/__tests__/client.watchQuery/streamGraphql17Alpha9.test.ts b/src/core/__tests__/client.watchQuery/streamGraphql17Alpha9.test.ts index 7d0872fcf2f..07e4d651bfe 100644 --- a/src/core/__tests__/client.watchQuery/streamGraphql17Alpha9.test.ts +++ b/src/core/__tests__/client.watchQuery/streamGraphql17Alpha9.test.ts @@ -148,11 +148,11 @@ test("handles streamed scalar lists", 
async () => { }); await expect(observableStream).toEmitTypedValue({ - loading: true, data: markAsStreaming({ scalarList: ["apple"], }), dataState: "streaming", + loading: true, networkStatus: NetworkStatus.streaming, partial: true, }); @@ -198,11 +198,11 @@ test("handles streamed multi-dimensional lists", async () => { }); await expect(observableStream).toEmitTypedValue({ - loading: true, data: markAsStreaming({ scalarListList: [["apple", "apple", "apple"]], }), dataState: "streaming", + loading: true, networkStatus: NetworkStatus.streaming, partial: true, }); From ce8b922368197f9a6a23ba321b55be1a4e96a248 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 9 Sep 2025 16:26:04 -0600 Subject: [PATCH 122/254] More stream tests --- .../streamGraphql17Alpha9.test.ts | 120 ++++++++++++++++++ 1 file changed, 120 insertions(+) diff --git a/src/core/__tests__/client.watchQuery/streamGraphql17Alpha9.test.ts b/src/core/__tests__/client.watchQuery/streamGraphql17Alpha9.test.ts index 07e4d651bfe..270ffb8cd52 100644 --- a/src/core/__tests__/client.watchQuery/streamGraphql17Alpha9.test.ts +++ b/src/core/__tests__/client.watchQuery/streamGraphql17Alpha9.test.ts @@ -577,3 +577,123 @@ test("handles final chunk without incremental value", async () => { await expect(observableStream).not.toEmitAnything(); }); + +test("handles errors thrown before initialCount is reached", async () => { + const client = new ApolloClient({ + link: createLink({ + async *friendList() { + yield await Promise.resolve(friends[0]); + throw new Error("bad"); + }, + }), + cache: new InMemoryCache(), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + const query = gql` + query FriendListQuery { + friendList @stream(initialCount: 2) { + id + name + } + } + `; + + const observableStream = new ObservableStream( + client.watchQuery({ query, errorPolicy: "all" }) + ); + + await expect(observableStream).toEmitTypedValue({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + data: { + friendList: null, + }, + error: new CombinedGraphQLErrors({ + data: { friendList: null }, + errors: [ + { + message: "bad", + path: ["friendList"], + }, + ], + }), + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.error, + partial: false, + }); + + await expect(observableStream).not.toEmitAnything(); +}); + +test("handles errors thrown after initialCount is reached", async () => { + const client = new ApolloClient({ + link: createLink({ + async *friendList() { + yield await Promise.resolve(friends[0]); + throw new Error("bad"); + }, + }), + cache: new InMemoryCache(), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + const query = gql` + query FriendListQuery { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + const observableStream = new ObservableStream( + client.watchQuery({ query, errorPolicy: "all" }) + ); + + await expect(observableStream).toEmitTypedValue({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + data: { + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }, + 
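+    // errorPolicy: "all" keeps the last delivered data and surfaces the error alongside it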
error: new CombinedGraphQLErrors({ + data: { friendList: [{ __typename: "Friend", id: "1", name: "Luke" }] }, + errors: [ + { + message: "bad", + path: ["friendList"], + }, + ], + }), + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.error, + partial: false, + }); + + await expect(observableStream).not.toEmitAnything(); +}); From 34f846bd08e4b84293085d877f17fd3ce3e245a6 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 9 Sep 2025 16:36:30 -0600 Subject: [PATCH 123/254] Use toEmitSimilarValue --- .../streamGraphql17Alpha9.test.ts | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/src/core/__tests__/client.watchQuery/streamGraphql17Alpha9.test.ts b/src/core/__tests__/client.watchQuery/streamGraphql17Alpha9.test.ts index 270ffb8cd52..134adc49142 100644 --- a/src/core/__tests__/client.watchQuery/streamGraphql17Alpha9.test.ts +++ b/src/core/__tests__/client.watchQuery/streamGraphql17Alpha9.test.ts @@ -561,13 +561,13 @@ test("handles final chunk without incremental value", async () => { partial: true, }); - await expect(observableStream).toEmitTypedValue({ - data: markAsStreaming({ - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3", name: "Leia" }, - ], + await expect(observableStream).toEmitSimilarValue({ + expected: (previous) => ({ + ...previous, + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, }), dataState: "complete", loading: false, From c0824bcd2c2c5d5109904f70df5f63e57faf7101 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 9 Sep 2025 16:37:34 -0600 Subject: [PATCH 124/254] Add test for nested stream with defer --- .../streamGraphql17Alpha9.test.ts | 199 +++++++++++++++++- 1 file changed, 195 insertions(+), 4 deletions(-) diff --git a/src/core/__tests__/client.watchQuery/streamGraphql17Alpha9.test.ts b/src/core/__tests__/client.watchQuery/streamGraphql17Alpha9.test.ts index 134adc49142..6abf9939c7e 100644 --- a/src/core/__tests__/client.watchQuery/streamGraphql17Alpha9.test.ts +++ b/src/core/__tests__/client.watchQuery/streamGraphql17Alpha9.test.ts @@ -124,6 +124,21 @@ function createLink(rootValue?: Record) { }); } +function promiseWithResolvers(): { + promise: Promise; + resolve: (value: T | Promise) => void; + reject: (reason?: any) => void; +} { + // these are assigned synchronously within the Promise constructor + let resolve!: (value: T | Promise) => void; + let reject!: (reason?: any) => void; + const promise = new Promise((res, rej) => { + resolve = res; + reject = rej; + }); + return { promise, resolve, reject }; +} + test("handles streamed scalar lists", async () => { const client = new ApolloClient({ link: createLink({ scalarList: ["apple", "banana", "orange"] }), @@ -569,10 +584,6 @@ test("handles final chunk without incremental value", async () => { networkStatus: NetworkStatus.ready, partial: false, }), - dataState: "complete", - loading: false, - networkStatus: NetworkStatus.ready, - partial: false, }); await expect(observableStream).not.toEmitAnything(); @@ -697,3 +708,183 @@ test("handles errors thrown after initialCount is reached", async () => { await expect(observableStream).not.toEmitAnything(); }); + +it("handles errors thrown due to null returned in non-null list items after initialCount is reached", async () => { + const client = new ApolloClient({ + link: createLink({ + nonNullFriendList: () => [friends[0], null, friends[1]], + }), + cache: new 
InMemoryCache(), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + const query = gql` + query { + nonNullFriendList @stream(initialCount: 1) { + id + name + } + } + `; + + const observableStream = new ObservableStream( + client.watchQuery({ query, errorPolicy: "all" }) + ); + + await expect(observableStream).toEmitTypedValue({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + data: markAsStreaming({ + nonNullFriendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + data: { + nonNullFriendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }, + error: new CombinedGraphQLErrors({ + data: { + nonNullFriendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }, + errors: [ + { + message: + "Cannot return null for non-nullable field Query.nonNullFriendList.", + path: ["nonNullFriendList", 1], + }, + ], + }), + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.error, + partial: false, + }); + + await expect(observableStream).not.toEmitAnything(); +}); + +it("handles stream when in parent deferred fragment", async () => { + const { promise: slowFieldPromise, resolve: resolveSlowField } = + promiseWithResolvers(); + + const client = new ApolloClient({ + link: createLink({ + nestedObject: { + scalarField: () => slowFieldPromise, + async *nestedFriendList() { + yield await Promise.resolve(friends[0]); + yield await Promise.resolve(friends[1]); + }, + }, + }), + cache: new InMemoryCache(), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + const query = gql` + query { + nestedObject { + ...DeferFragment @defer + } + } + fragment DeferFragment on NestedObject { + scalarField + nestedFriendList @stream(initialCount: 0) { + id + name + } + } + `; + + const observableStream = new ObservableStream(client.watchQuery({ query })); + + await expect(observableStream).toEmitTypedValue({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + data: markAsStreaming({ + nestedObject: { + __typename: "NestedObject", + }, + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + resolveSlowField("slow"); + + await expect(observableStream).toEmitTypedValue({ + data: markAsStreaming({ + nestedObject: { + __typename: "NestedObject", + scalarField: "slow", + nestedFriendList: [], + }, + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + data: markAsStreaming({ + nestedObject: { + __typename: "NestedObject", + scalarField: "slow", + nestedFriendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }, + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + data: markAsStreaming({ + nestedObject: { + __typename: "NestedObject", + scalarField: "slow", + nestedFriendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + ], + }, + }), + dataState: "streaming", + loading: true, + networkStatus: 
NetworkStatus.streaming, + partial: true, + }); + + await expect(observableStream).toEmitSimilarValue({ + expected: (previous) => ({ + ...previous, + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }), + }); + + await expect(observableStream).not.toEmitAnything(); +}); From 65b9949b1dc4e9b4df632fc25a32a6c670b6a36d Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 9 Sep 2025 16:45:06 -0600 Subject: [PATCH 125/254] Add test for defer inside stream --- .../streamGraphql17Alpha9.test.ts | 98 +++++++++++++++++++ 1 file changed, 98 insertions(+) diff --git a/src/core/__tests__/client.watchQuery/streamGraphql17Alpha9.test.ts b/src/core/__tests__/client.watchQuery/streamGraphql17Alpha9.test.ts index 6abf9939c7e..133c8a4d11a 100644 --- a/src/core/__tests__/client.watchQuery/streamGraphql17Alpha9.test.ts +++ b/src/core/__tests__/client.watchQuery/streamGraphql17Alpha9.test.ts @@ -888,3 +888,101 @@ it("handles stream when in parent deferred fragment", async () => { await expect(observableStream).not.toEmitAnything(); }); + +test("handles @defer inside @stream", async () => { + const { promise: slowFieldPromise, resolve: resolveSlowField } = + promiseWithResolvers(); + const { + promise: iterableCompletionPromise, + resolve: resolveIterableCompletion, + } = promiseWithResolvers(); + + const client = new ApolloClient({ + link: createLink({ + async *friendList() { + yield await Promise.resolve(friends[0]); + yield await Promise.resolve({ + id: friends[1].id, + name: () => slowFieldPromise, + }); + await iterableCompletionPromise; + }, + }), + cache: new InMemoryCache(), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + const query = gql` + query { + friendList @stream { + ...NameFragment @defer + id + } + } + fragment NameFragment on Friend { + name + } + `; + + const observableStream = new ObservableStream(client.watchQuery({ query })); + + await expect(observableStream).toEmitTypedValue({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + data: markAsStreaming({ + friendList: [], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + resolveIterableCompletion(null); + + await expect(observableStream).toEmitSimilarValue({ + expected: (previous) => ({ + ...previous, + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }), + dataState: "streaming", + }), + }); + + resolveSlowField("Han"); + + await expect(observableStream).toEmitSimilarValue({ + expected: (previous) => ({ + ...previous, + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2" }, + ], + }), + dataState: "streaming", + }), + }); + + await expect(observableStream).toEmitTypedValue({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + ], + }, + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); + + await expect(observableStream).not.toEmitAnything(); +}); From 923b340b896496d3221c9450d938b6e3fa0aef22 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 9 Sep 2025 16:48:32 -0600 Subject: [PATCH 126/254] Add promiseWithResolvers to testing utils --- src/testing/internal/index.ts | 1 + src/testing/internal/promiseWithResolvers.ts | 15 +++++++++++++++ 2 
files changed, 16 insertions(+) create mode 100644 src/testing/internal/promiseWithResolvers.ts diff --git a/src/testing/internal/index.ts b/src/testing/internal/index.ts index 37fad789108..070e644ad14 100644 --- a/src/testing/internal/index.ts +++ b/src/testing/internal/index.ts @@ -23,6 +23,7 @@ export { } from "./scenarios/index.js"; export { createClientWrapper, createMockWrapper } from "./renderHelpers.js"; export { actAsync } from "./rtl/actAsync.js"; +export { promiseWithResolvers } from "./promiseWithResolvers.js"; export { renderAsync } from "./rtl/renderAsync.js"; export { renderHookAsync } from "./rtl/renderHookAsync.js"; export { mockDefer20220824 } from "./multipart/mockDefer20220824.js"; diff --git a/src/testing/internal/promiseWithResolvers.ts b/src/testing/internal/promiseWithResolvers.ts new file mode 100644 index 00000000000..68283719b04 --- /dev/null +++ b/src/testing/internal/promiseWithResolvers.ts @@ -0,0 +1,15 @@ +export function promiseWithResolvers(): { + promise: Promise; + resolve: (value: T | Promise) => void; + reject: (reason?: any) => void; +} { + let resolve!: (value: T | Promise) => void; + let reject!: (reason?: any) => void; + + const promise = new Promise((res, rej) => { + resolve = res; + reject = rej; + }); + + return { promise, resolve, reject }; +} From 5050441806c939035451e70dd5d0bc420b069077 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 9 Sep 2025 16:50:09 -0600 Subject: [PATCH 127/254] Use shared promiseWithResolvers helper --- .../streamGraphql17Alpha9.test.ts | 16 +--------------- .../__tests__/graphql17Alpha9/defer.test.ts | 18 +----------------- .../__tests__/graphql17Alpha9/stream.test.ts | 18 +----------------- 3 files changed, 3 insertions(+), 49 deletions(-) diff --git a/src/core/__tests__/client.watchQuery/streamGraphql17Alpha9.test.ts b/src/core/__tests__/client.watchQuery/streamGraphql17Alpha9.test.ts index 133c8a4d11a..c414da5a7bc 100644 --- a/src/core/__tests__/client.watchQuery/streamGraphql17Alpha9.test.ts +++ b/src/core/__tests__/client.watchQuery/streamGraphql17Alpha9.test.ts @@ -28,6 +28,7 @@ import { markAsStreaming, mockDeferStreamGraphQL17Alpha9, ObservableStream, + promiseWithResolvers, } from "@apollo/client/testing/internal"; const friendType = new GraphQLObjectType({ @@ -124,21 +125,6 @@ function createLink(rootValue?: Record) { }); } -function promiseWithResolvers(): { - promise: Promise; - resolve: (value: T | Promise) => void; - reject: (reason?: any) => void; -} { - // these are assigned synchronously within the Promise constructor - let resolve!: (value: T | Promise) => void; - let reject!: (reason?: any) => void; - const promise = new Promise((res, rej) => { - resolve = res; - reject = rej; - }); - return { promise, resolve, reject }; -} - test("handles streamed scalar lists", async () => { const client = new ApolloClient({ link: createLink({ scalarList: ["apple", "banana", "orange"] }), diff --git a/src/incremental/handlers/__tests__/graphql17Alpha9/defer.test.ts b/src/incremental/handlers/__tests__/graphql17Alpha9/defer.test.ts index b61e7d2d4e7..06f47d063bc 100644 --- a/src/incremental/handlers/__tests__/graphql17Alpha9/defer.test.ts +++ b/src/incremental/handlers/__tests__/graphql17Alpha9/defer.test.ts @@ -30,6 +30,7 @@ import { markAsStreaming, mockDeferStreamGraphQL17Alpha9, ObservableStream, + promiseWithResolvers, wait, } from "@apollo/client/testing/internal"; @@ -153,23 +154,6 @@ function resolveOnNextTick(): Promise { return Promise.resolve(undefined); } -type PromiseOrValue = Promise | T; - 
-function promiseWithResolvers(): { - promise: Promise; - resolve: (value: T | PromiseOrValue) => void; - reject: (reason?: any) => void; -} { - // these are assigned synchronously within the Promise constructor - let resolve!: (value: T | PromiseOrValue) => void; - let reject!: (reason?: any) => void; - const promise = new Promise((res, rej) => { - resolve = res; - reject = rej; - }); - return { promise, resolve, reject }; -} - async function* run( document: DocumentNode, rootValue: Record = { hero }, diff --git a/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts b/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts index de6ea29806a..db3beac262b 100644 --- a/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts +++ b/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts @@ -28,6 +28,7 @@ import { GraphQL17Alpha9Handler } from "@apollo/client/incremental"; import { markAsStreaming, ObservableStream, + promiseWithResolvers, } from "@apollo/client/testing/internal"; // This is the test setup of the `graphql-js` v17.0.0-alpha.9 release: @@ -97,23 +98,6 @@ function resolveOnNextTick(): Promise { return Promise.resolve(undefined); } -type PromiseOrValue = Promise | T; - -function promiseWithResolvers(): { - promise: Promise; - resolve: (value: T | PromiseOrValue) => void; - reject: (reason?: any) => void; -} { - // these are assigned synchronously within the Promise constructor - let resolve!: (value: T | PromiseOrValue) => void; - let reject!: (reason?: any) => void; - const promise = new Promise((res, rej) => { - resolve = res; - reject = rej; - }); - return { promise, resolve, reject }; -} - async function* run( document: DocumentNode, rootValue: unknown = {}, From fbde032e721a553b7ee6d025cf7f7a8c75a98aa6 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 9 Sep 2025 17:44:34 -0600 Subject: [PATCH 128/254] Temp rename --- ...reamGraphql17Alpha9.test.ts => streamGraphQL17Alpha90.test.ts} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename src/core/__tests__/client.watchQuery/{streamGraphql17Alpha9.test.ts => streamGraphQL17Alpha90.test.ts} (100%) diff --git a/src/core/__tests__/client.watchQuery/streamGraphql17Alpha9.test.ts b/src/core/__tests__/client.watchQuery/streamGraphQL17Alpha90.test.ts similarity index 100% rename from src/core/__tests__/client.watchQuery/streamGraphql17Alpha9.test.ts rename to src/core/__tests__/client.watchQuery/streamGraphQL17Alpha90.test.ts From d32ef4316640392ba9e3093d1da4d1721b17ae3e Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 9 Sep 2025 17:44:49 -0600 Subject: [PATCH 129/254] Fix rename --- ...reamGraphQL17Alpha90.test.ts => streamGraphQL17Alpha9.test.ts} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename src/core/__tests__/client.watchQuery/{streamGraphQL17Alpha90.test.ts => streamGraphQL17Alpha9.test.ts} (100%) diff --git a/src/core/__tests__/client.watchQuery/streamGraphQL17Alpha90.test.ts b/src/core/__tests__/client.watchQuery/streamGraphQL17Alpha9.test.ts similarity index 100% rename from src/core/__tests__/client.watchQuery/streamGraphQL17Alpha90.test.ts rename to src/core/__tests__/client.watchQuery/streamGraphQL17Alpha9.test.ts From 2b336ce9f11758b5512e0eea841c8f2214db65fe Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 9 Sep 2025 18:46:54 -0600 Subject: [PATCH 130/254] Add tests for stream with defer20220824 --- .../__tests__/defer20220824/stream.test.ts | 1733 +++++++++++++++++ 1 file changed, 1733 insertions(+) create mode 100644 
src/incremental/handlers/__tests__/defer20220824/stream.test.ts diff --git a/src/incremental/handlers/__tests__/defer20220824/stream.test.ts b/src/incremental/handlers/__tests__/defer20220824/stream.test.ts new file mode 100644 index 00000000000..ab7c27a0c1c --- /dev/null +++ b/src/incremental/handlers/__tests__/defer20220824/stream.test.ts @@ -0,0 +1,1733 @@ +import assert from "node:assert"; + +import type { + FormattedExecutionResult, + FormattedInitialIncrementalExecutionResult, + FormattedSubsequentIncrementalExecutionResult, +} from "graphql-17-alpha2"; +import { + experimentalExecuteIncrementally, + GraphQLID, + GraphQLList, + GraphQLNonNull, + GraphQLObjectType, + GraphQLSchema, + GraphQLString, +} from "graphql-17-alpha2"; +import { from } from "rxjs"; + +import type { DocumentNode } from "@apollo/client"; +import { + ApolloClient, + ApolloLink, + gql, + InMemoryCache, + NetworkStatus, +} from "@apollo/client"; +import { Defer20220824Handler } from "@apollo/client/incremental"; +import { + markAsStreaming, + ObservableStream, + promiseWithResolvers, +} from "@apollo/client/testing/internal"; + +// This is the test setup of the `graphql-js` v17.0.0-alpha.2 release: +// https://github.com/graphql/graphql-js/blob/042002c3d332d36c67861f5b37d39b74d54d97d4/src/execution/__tests__/stream-test.ts + +const friendType = new GraphQLObjectType({ + fields: { + id: { type: GraphQLID }, + name: { type: GraphQLString }, + nonNullName: { type: new GraphQLNonNull(GraphQLString) }, + }, + name: "Friend", +}); + +const friends = [ + { name: "Luke", id: 1 }, + { name: "Han", id: 2 }, + { name: "Leia", id: 3 }, +]; + +const query = new GraphQLObjectType({ + fields: { + scalarList: { + type: new GraphQLList(GraphQLString), + }, + scalarListList: { + type: new GraphQLList(new GraphQLList(GraphQLString)), + }, + friendList: { + type: new GraphQLList(friendType), + }, + nonNullFriendList: { + type: new GraphQLList(new GraphQLNonNull(friendType)), + }, + nestedObject: { + type: new GraphQLObjectType({ + name: "NestedObject", + fields: { + scalarField: { + type: GraphQLString, + }, + nonNullScalarField: { + type: new GraphQLNonNull(GraphQLString), + }, + nestedFriendList: { type: new GraphQLList(friendType) }, + deeperNestedObject: { + type: new GraphQLObjectType({ + name: "DeeperNestedObject", + fields: { + nonNullScalarField: { + type: new GraphQLNonNull(GraphQLString), + }, + deeperNestedFriendList: { type: new GraphQLList(friendType) }, + }, + }), + }, + }, + }), + }, + }, + name: "Query", +}); + +const schema = new GraphQLSchema({ query }); + +async function* run( + document: DocumentNode, + rootValue: unknown = {} +): AsyncGenerator< + | FormattedInitialIncrementalExecutionResult + | FormattedSubsequentIncrementalExecutionResult + | FormattedExecutionResult, + void +> { + const result = await experimentalExecuteIncrementally({ + schema, + document, + rootValue, + }); + + if ("initialResult" in result) { + yield JSON.parse(JSON.stringify(result.initialResult)); + + for await (const patch of result.subsequentResults) { + yield JSON.parse(JSON.stringify(patch)); + } + } else { + yield JSON.parse(JSON.stringify(result)); + } +} + +function createSchemaLink(rootValue?: Record) { + return new ApolloLink((operation) => { + return from(run(operation.query, rootValue)); + }); +} + +describe("Execute: stream directive", () => { + it("Can stream a list field", async () => { + const query = gql` + query { + scalarList @stream(initialCount: 1) + } + `; + const handler = new Defer20220824Handler(); + const request = 
handler.startRequest({ query }); + + const incoming = run(query, { + scalarList: () => ["apple", "banana", "coconut"], + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + scalarList: ["apple"], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + scalarList: ["apple", "banana"], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + scalarList: ["apple", "banana", "coconut"], + }, + }); + expect(request.hasNext).toBe(false); + } + }); + + it("Can use default value of initialCount", async () => { + const query = gql` + query { + scalarList @stream + } + `; + const handler = new Defer20220824Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + scalarList: () => ["apple", "banana", "coconut"], + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + scalarList: [], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + scalarList: ["apple"], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + scalarList: ["apple", "banana"], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + scalarList: ["apple", "banana"], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + scalarList: ["apple", "banana", "coconut"], + }, + }); + expect(request.hasNext).toBe(false); + } + }); + + it.skip("Negative values of initialCount throw field errors", async () => { + // from a client perspective, a regular graphql query + }); + + it.skip("Returns label from stream directive", async () => { + // from a client perspective, a repeat of a previous test + }); + + it.skip("Can disable @stream using if argument", async () => { + // from a client perspective, a regular graphql query + }); + + it("Does not disable stream with null if argument", async () => { + const query = gql` + query ($shouldStream: Boolean) { + scalarList @stream(initialCount: 2, if: $shouldStream) + } + `; + + const handler = new Defer20220824Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + scalarList: () => ["apple", "banana", "coconut"], + }); + + { + const { 
value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + scalarList: ["apple", "banana"], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + scalarList: ["apple", "banana", "coconut"], + }, + }); + expect(request.hasNext).toBe(false); + } + }); + + it("Can stream multi-dimensional lists", async () => { + const query = gql` + query { + scalarListList @stream(initialCount: 1) + } + `; + + const handler = new Defer20220824Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + scalarListList: () => [ + ["apple", "apple", "apple"], + ["banana", "banana", "banana"], + ["coconut", "coconut", "coconut"], + ], + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + scalarListList: [["apple", "apple", "apple"]], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + scalarListList: [ + ["apple", "apple", "apple"], + ["banana", "banana", "banana"], + ], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + scalarListList: [ + ["apple", "apple", "apple"], + ["banana", "banana", "banana"], + ["coconut", "coconut", "coconut"], + ], + }, + }); + expect(request.hasNext).toBe(false); + } + }); + + it("Can stream a field that returns a list of promises", async () => { + const query = gql` + query { + friendList @stream(initialCount: 2) { + name + id + } + } + `; + + const handler = new Defer20220824Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + friendList: () => friends.map((f) => Promise.resolve(f)), + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [ + { + name: "Luke", + id: "1", + }, + { + name: "Han", + id: "2", + }, + ], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [ + { name: "Luke", id: "1" }, + { name: "Han", id: "2" }, + { name: "Leia", id: "3" }, + ], + }, + }); + expect(request.hasNext).toBe(false); + } + }); + + it("Can stream in correct order with lists of promises", async () => { + const query = gql` + query { + friendList @stream(initialCount: 0) { + name + id + } + } + `; + + const handler = new Defer20220824Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + friendList: () => friends.map((f) => Promise.resolve(f)), + }); + + { + const { value: chunk, done } = await incoming.next(); + + 
assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [{ name: "Luke", id: "1" }], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [ + { name: "Luke", id: "1" }, + { name: "Han", id: "2" }, + ], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [ + { name: "Luke", id: "1" }, + { name: "Han", id: "2" }, + { name: "Leia", id: "3" }, + ], + }, + }); + expect(request.hasNext).toBe(true); + } + }); + + it("Handles rejections in a field that returns a list of promises before initialCount is reached", async () => { + const query = gql` + query { + friendList @stream(initialCount: 2) { + name + id + } + } + `; + + const handler = new Defer20220824Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + friendList: () => + friends.map((f, i) => { + if (i === 1) { + return Promise.reject(new Error("bad")); + } + return Promise.resolve(f); + }), + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [{ name: "Luke", id: "1" }, null], + }, + errors: [ + { + message: "bad", + locations: [{ line: 3, column: 9 }], + path: ["friendList", 1], + }, + ], + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [ + { name: "Luke", id: "1" }, + null, + { name: "Leia", id: "3" }, + ], + }, + errors: [ + { + message: "bad", + locations: [{ line: 3, column: 9 }], + path: ["friendList", 1], + }, + ], + }); + expect(request.hasNext).toBe(false); + } + }); + + it("Handles rejections in a field that returns a list of promises after initialCount is reached", async () => { + const query = gql` + query { + friendList @stream(initialCount: 1) { + name + id + } + } + `; + + const handler = new Defer20220824Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + friendList: () => + friends.map((f, i) => { + if (i === 1) { + return Promise.reject(new Error("bad")); + } + return Promise.resolve(f); + }), + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [{ name: "Luke", id: "1" }], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ 
+ data: { + friendList: [{ name: "Luke", id: "1" }, null], + }, + errors: [ + { + message: "bad", + locations: [{ line: 3, column: 9 }], + path: ["friendList", 1], + }, + ], + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [ + { name: "Luke", id: "1" }, + null, + { name: "Leia", id: "3" }, + ], + }, + errors: [ + { + message: "bad", + locations: [{ line: 3, column: 9 }], + path: ["friendList", 1], + }, + ], + }); + expect(request.hasNext).toBe(false); + } + }); + + it("Can stream a field that returns an async iterable", async () => { + const query = gql` + query { + friendList @stream { + name + id + } + } + `; + + const handler = new Defer20220824Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + async *friendList() { + yield await Promise.resolve(friends[0]); + yield await Promise.resolve(friends[1]); + yield await Promise.resolve(friends[2]); + }, + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [{ name: "Luke", id: "1" }], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [ + { name: "Luke", id: "1" }, + { name: "Han", id: "2" }, + ], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [ + { name: "Luke", id: "1" }, + { name: "Han", id: "2" }, + { name: "Leia", id: "3" }, + ], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [ + { name: "Luke", id: "1" }, + { name: "Han", id: "2" }, + { name: "Leia", id: "3" }, + ], + }, + }); + expect(request.hasNext).toBe(false); + } + }); + + it("Can stream a field that returns an async iterable, using a non-zero initialCount", async () => { + const query = gql` + query { + friendList @stream(initialCount: 2) { + name + id + } + } + `; + + const handler = new Defer20220824Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + async *friendList() { + yield await Promise.resolve(friends[0]); + yield await Promise.resolve(friends[1]); + yield await Promise.resolve(friends[2]); + }, + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [ + { name: "Luke", id: "1" }, + { name: "Han", id: "2" }, + ], + }, + }); + 
expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [ + { name: "Luke", id: "1" }, + { name: "Han", id: "2" }, + { name: "Leia", id: "3" }, + ], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [ + { name: "Luke", id: "1" }, + { name: "Han", id: "2" }, + { name: "Leia", id: "3" }, + ], + }, + }); + expect(request.hasNext).toBe(false); + } + }); + + it.skip("Negative values of initialCount throw field errors on a field that returns an async iterable", async () => { + // from a client persective, a regular graphql query + }); + + it.skip("Can handle concurrent calls to .next() without waiting", async () => { + // from a client persective, a repeat of a previous test + }); + + it.skip("Handles error thrown in async iterable before initialCount is reached", async () => { + // from a client perspective, a regular graphql query + }); + + it("Handles error thrown in async iterable after initialCount is reached", async () => { + const query = gql` + query { + friendList @stream(initialCount: 1) { + name + id + } + } + `; + + const handler = new Defer20220824Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + async *friendList() { + yield await Promise.resolve(friends[0]); + throw new Error("bad"); + }, + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [{ name: "Luke", id: "1" }], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [{ name: "Luke", id: "1" }, null], + }, + errors: [ + { + message: "bad", + path: ["friendList", 1], + }, + ], + }); + expect(request.hasNext).toBe(false); + } + }); + + it("Handles null returned in non-null list items after initialCount is reached", async () => { + const query = gql` + query { + nonNullFriendList @stream(initialCount: 1) { + name + } + } + `; + + const handler = new Defer20220824Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + nonNullFriendList: () => [friends[0], null], + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + nonNullFriendList: [{ name: "Luke" }], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + nonNullFriendList: [{ name: "Luke" }], + }, + errors: [ + { + message: + "Cannot return null for non-nullable field Query.nonNullFriendList.", + path: ["nonNullFriendList", 1], + }, + ], + }); + expect(request.hasNext).toBe(false); + } + }); + + it.skip("Handles null returned in non-null async iterable 
list items after initialCount is reached", async () => { + // from a client perspective, a repeat of the previous test + }); + + it("Handles errors thrown by completeValue after initialCount is reached", async () => { + const query = gql` + query { + scalarList @stream(initialCount: 1) + } + `; + + const handler = new Defer20220824Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + scalarList: () => [friends[0].name, {}], + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + scalarList: ["Luke"], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + scalarList: ["Luke", null], + }, + errors: [ + { + message: "String cannot represent value: {}", + path: ["scalarList", 1], + }, + ], + }); + expect(request.hasNext).toBe(false); + } + }); + + it("Handles async errors thrown by completeValue after initialCount is reached", async () => { + const query = gql` + query { + nonNullFriendList @stream(initialCount: 1) { + nonNullName + } + } + `; + + const handler = new Defer20220824Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + nonNullFriendList: () => [ + Promise.resolve({ nonNullName: friends[0].name }), + Promise.resolve({ + nonNullName: () => Promise.reject(new Error("Oops")), + }), + Promise.resolve({ nonNullName: friends[1].name }), + ], + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + nonNullFriendList: [{ nonNullName: "Luke" }], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + nonNullFriendList: [{ nonNullName: "Luke" }], + }, + errors: [ + { + message: "Oops", + path: ["nonNullFriendList", 1, "nonNullName"], + }, + ], + }); + expect(request.hasNext).toBe(false); + } + }); + + it("Handles async errors thrown by completeValue after initialCount is reached from async iterable", async () => { + const query = gql` + query { + friendList @stream(initialCount: 1) { + nonNullName + } + } + `; + + const handler = new Defer20220824Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + async *friendList() { + yield await Promise.resolve({ nonNullName: friends[0].name }); + yield await Promise.resolve({ + nonNullName: () => Promise.reject(new Error("Oops")), + }); + yield await Promise.resolve({ nonNullName: friends[1].name }); + }, + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [{ nonNullName: "Luke" }], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [{ nonNullName: 
"Luke" }, null], + }, + errors: [ + { + message: "Oops", + path: ["friendList", 1, "nonNullName"], + }, + ], + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [{ nonNullName: "Luke" }, null, { nonNullName: "Han" }], + }, + errors: [ + { + message: "Oops", + path: ["friendList", 1, "nonNullName"], + }, + ], + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [{ nonNullName: "Luke" }, null, { nonNullName: "Han" }], + }, + errors: [ + { + message: "Oops", + path: ["friendList", 1, "nonNullName"], + }, + ], + }); + expect(request.hasNext).toBe(false); + } + }); + + it.skip("Filters payloads that are nulled", async () => { + // from a client perspective, a regular graphql query + }); + + it("Does not filter payloads when null error is in a different path", async () => { + const query = gql` + query { + otherNestedObject: nestedObject { + ... @defer { + scalarField + } + } + nestedObject { + nestedFriendList @stream(initialCount: 0) { + name + } + } + } + `; + + const handler = new Defer20220824Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + nestedObject: { + scalarField: () => Promise.reject(new Error("Oops")), + async *nestedFriendList() { + yield await Promise.resolve(friends[0]); + }, + }, + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + otherNestedObject: {}, + nestedObject: { nestedFriendList: [] }, + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + otherNestedObject: { + scalarField: null, + }, + nestedObject: { nestedFriendList: [{ name: "Luke" }] }, + }, + errors: [ + { + message: "Oops", + path: ["otherNestedObject", "scalarField"], + }, + ], + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + otherNestedObject: { + scalarField: null, + }, + nestedObject: { nestedFriendList: [{ name: "Luke" }] }, + }, + errors: [ + { + message: "Oops", + path: ["otherNestedObject", "scalarField"], + }, + ], + }); + expect(request.hasNext).toBe(false); + } + }); + + it("Filters stream payloads that are nulled in a deferred payload", async () => { + const query = gql` + query { + nestedObject { + ... 
@defer { + deeperNestedObject { + nonNullScalarField + deeperNestedFriendList @stream(initialCount: 0) { + name + } + } + } + } + } + `; + + const handler = new Defer20220824Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + nestedObject: { + deeperNestedObject: { + nonNullScalarField: () => Promise.resolve(null), + async *deeperNestedFriendList() { + yield await Promise.resolve(friends[0]); + }, + }, + }, + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + nestedObject: {}, + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + nestedObject: { + deeperNestedObject: null, + }, + }, + errors: [ + { + message: + "Cannot return null for non-nullable field DeeperNestedObject.nonNullScalarField.", + path: ["nestedObject", "deeperNestedObject", "nonNullScalarField"], + }, + ], + }); + expect(request.hasNext).toBe(false); + } + }); + + it("Filters defer payloads that are nulled in a stream response", async () => { + const query = gql` + query { + friendList @stream(initialCount: 0) { + nonNullName + ... @defer { + name + } + } + } + `; + + const handler = new Defer20220824Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + async *friendList() { + yield await Promise.resolve({ + name: friends[0].name, + nonNullName: () => Promise.resolve(null), + }); + }, + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [null], + }, + errors: [ + { + message: + "Cannot return null for non-nullable field Friend.nonNullName.", + path: ["friendList", 0, "nonNullName"], + }, + ], + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [null], + }, + errors: [ + { + message: + "Cannot return null for non-nullable field Friend.nonNullName.", + path: ["friendList", 0, "nonNullName"], + }, + ], + }); + expect(request.hasNext).toBe(false); + } + }); + + it.skip("Returns iterator and ignores errors when stream payloads are filtered", async () => { + // from a client perspective, a repeat of a previous test + }); + + it("Handles promises returned by completeValue after initialCount is reached", async () => { + const query = gql` + query { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + const handler = new Defer20220824Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + async *friendList() { + yield await Promise.resolve(friends[0]); + yield await Promise.resolve(friends[1]); + yield await Promise.resolve({ + id: friends[2].id, + name: () => Promise.resolve(friends[2].name), + 
}); + }, + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [{ id: "1", name: "Luke" }], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [ + { id: "1", name: "Luke" }, + { id: "2", name: "Han" }, + ], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [ + { id: "1", name: "Luke" }, + { id: "2", name: "Han" }, + { id: "3", name: "Leia" }, + ], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [ + { id: "1", name: "Luke" }, + { id: "2", name: "Han" }, + { id: "3", name: "Leia" }, + ], + }, + }); + expect(request.hasNext).toBe(false); + } + }); + + it("Returns payloads in correct order when parent deferred fragment resolves slower than stream", async () => { + const { promise: slowFieldPromise, resolve: resolveSlowField } = + promiseWithResolvers(); + const query = gql` + query { + nestedObject { + ...DeferFragment @defer + } + } + fragment DeferFragment on NestedObject { + scalarField + nestedFriendList @stream(initialCount: 0) { + name + } + } + `; + + const handler = new Defer20220824Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + nestedObject: { + scalarField: () => slowFieldPromise, + async *nestedFriendList() { + yield await Promise.resolve(friends[0]); + yield await Promise.resolve(friends[1]); + }, + }, + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + nestedObject: {}, + }, + }); + expect(request.hasNext).toBe(true); + } + + resolveSlowField("slow"); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + nestedObject: { + scalarField: "slow", + nestedFriendList: [], + }, + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + nestedObject: { + scalarField: "slow", + nestedFriendList: [{ name: "Luke" }], + }, + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + nestedObject: { + scalarField: "slow", + nestedFriendList: [{ name: "Luke" }, { name: "Han" }], + }, + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + 
assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + nestedObject: { + scalarField: "slow", + nestedFriendList: [{ name: "Luke" }, { name: "Han" }], + }, + }, + }); + expect(request.hasNext).toBe(false); + } + }); + + it("Can @defer fields that are resolved after async iterable is complete", async () => { + const { promise: slowFieldPromise, resolve: resolveSlowField } = + promiseWithResolvers(); + const { + promise: iterableCompletionPromise, + resolve: resolveIterableCompletion, + } = promiseWithResolvers(); + + const query = gql` + query { + friendList @stream(initialCount: 1, label: "stream-label") { + ...NameFragment @defer(label: "DeferName") @defer(label: "DeferName") + id + } + } + fragment NameFragment on Friend { + name + } + `; + + const handler = new Defer20220824Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + async *friendList() { + yield await Promise.resolve(friends[0]); + yield await Promise.resolve({ + id: friends[1].id, + name: () => slowFieldPromise, + }); + await iterableCompletionPromise; + }, + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [{ id: "1" }], + }, + }); + expect(request.hasNext).toBe(true); + } + + resolveIterableCompletion(null); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [{ id: "1", name: "Luke" }, { id: "2" }], + }, + }); + expect(request.hasNext).toBe(true); + } + + resolveSlowField("Han"); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + friendList: [ + { id: "1", name: "Luke" }, + { id: "2", name: "Han" }, + ], + }, + }); + expect(request.hasNext).toBe(false); + } + }); + + it.skip("Can @defer fields that are resolved before async iterable is complete", async () => { + // from a client perspective, a repeat of the previous test + }); + + it.skip("Returns underlying async iterables when returned generator is returned", async () => { + // not interesting from a client perspective + }); + + it.skip("Can return async iterable when underlying iterable does not have a return method", async () => { + // not interesting from a client perspective + }); + + it.skip("Returns underlying async iterables when returned generator is thrown", async () => { + // not interesting from a client perspective + }); +}); + +// quick smoke test. 
More exhaustive `@stream` tests can be found in +// src/core/__tests__/client.watchQuery/streamDefer20220824.test.ts +test("Defer20220824Handler can be used with `ApolloClient`", async () => { + const client = new ApolloClient({ + link: createSchemaLink({ friendList: friends }), + cache: new InMemoryCache(), + incrementalHandler: new Defer20220824Handler(), + }); + + const query = gql` + query FriendListQuery { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + const observableStream = new ObservableStream(client.watchQuery({ query })); + + await expect(observableStream).toEmitTypedValue({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + loading: true, + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); +}); From 41ae9819335438091936e60db2e467416ca9df79 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 9 Sep 2025 18:48:08 -0600 Subject: [PATCH 131/254] Move defer20220824 defer tests to subfolder --- .../{defer20220824.test.ts => defer20220824/defer.test.ts} | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) rename src/incremental/handlers/__tests__/{defer20220824.test.ts => defer20220824/defer.test.ts} (99%) diff --git a/src/incremental/handlers/__tests__/defer20220824.test.ts b/src/incremental/handlers/__tests__/defer20220824/defer.test.ts similarity index 99% rename from src/incremental/handlers/__tests__/defer20220824.test.ts rename to src/incremental/handlers/__tests__/defer20220824/defer.test.ts index e412199e2a6..51f2eb9c874 100644 --- a/src/incremental/handlers/__tests__/defer20220824.test.ts +++ b/src/incremental/handlers/__tests__/defer20220824/defer.test.ts @@ -35,7 +35,7 @@ import { import { hasIncrementalChunks, // eslint-disable-next-line local-rules/no-relative-imports -} from "../defer20220824.js"; +} from "../../defer20220824.js"; // This is the test setup of the `graphql-js` v17.0.0-alpha.2 release: // https://github.com/graphql/graphql-js/blob/364cd71d1a26eb6f62661efd7fa399e91332d30d/src/execution/__tests__/defer-test.ts From aa3924ad24720767689f3c9bc1231344635f503e Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 9 Sep 2025 18:54:22 -0600 Subject: [PATCH 132/254] Update types for defer20220824 handler --- src/incremental/handlers/defer20220824.ts | 27 +++++++++++++++++------ 1 file changed, 20 insertions(+), 7 deletions(-) diff --git a/src/incremental/handlers/defer20220824.ts b/src/incremental/handlers/defer20220824.ts index 0ed7cd97f59..ba3008c7c57 100644 --- a/src/incremental/handlers/defer20220824.ts +++ b/src/incremental/handlers/defer20220824.ts @@ -29,6 +29,7 @@ export declare namespace Defer20220824Handler { errors?: ReadonlyArray; extensions?: Record; hasNext: boolean; + incremental?: ReadonlyArray>; }; export type SubsequentResult> = { @@ -36,20 +37,32 @@ export declare namespace Defer20220824Handler { errors?: ReadonlyArray; extensions?: Record; hasNext: boolean; - incremental?: Array>; + incremental?: Array>; }; - export type 
Chunk> = - | InitialResult - | SubsequentResult; - - export type IncrementalDeferPayload> = { - data?: TData | null | undefined; + export type IncrementalDeferResult> = { + data?: TData | null; errors?: ReadonlyArray; extensions?: Record; path?: Incremental.Path; label?: string; }; + + export type IncrementalStreamResult> = { + errors?: ReadonlyArray; + items?: TData; + path?: Incremental.Path; + label?: string; + extensions?: Record; + }; + + export type IncrementalResult> = + | IncrementalDeferResult + | IncrementalStreamResult; + + export type Chunk> = + | InitialResult + | SubsequentResult; } class DeferRequest> From 020ba183f7b3ecd3cac4b1a818bdfdf0865dc699 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 9 Sep 2025 19:25:16 -0600 Subject: [PATCH 133/254] Fix assertion on test --- .../handlers/__tests__/defer20220824/stream.test.ts | 13 ------------- 1 file changed, 13 deletions(-) diff --git a/src/incremental/handlers/__tests__/defer20220824/stream.test.ts b/src/incremental/handlers/__tests__/defer20220824/stream.test.ts index ab7c27a0c1c..ca21dc4996a 100644 --- a/src/incremental/handlers/__tests__/defer20220824/stream.test.ts +++ b/src/incremental/handlers/__tests__/defer20220824/stream.test.ts @@ -153,19 +153,6 @@ describe("Execute: stream directive", () => { expect(request.hasNext).toBe(true); } - { - const { value: chunk, done } = await incoming.next(); - - assert(!done); - assert(handler.isIncrementalResult(chunk)); - expect(request.handle(undefined, chunk)).toStrictEqualTyped({ - data: { - scalarList: ["apple", "banana"], - }, - }); - expect(request.hasNext).toBe(true); - } - { const { value: chunk, done } = await incoming.next(); From 4fbec80532bccf0109aad4684fc9c7004e2efc63 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 9 Sep 2025 19:29:39 -0600 Subject: [PATCH 134/254] First pass at implementing stream for old format --- src/incremental/handlers/defer20220824.ts | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/incremental/handlers/defer20220824.ts b/src/incremental/handlers/defer20220824.ts index ba3008c7c57..5dd0b9cea50 100644 --- a/src/incremental/handlers/defer20220824.ts +++ b/src/incremental/handlers/defer20220824.ts @@ -102,7 +102,12 @@ class DeferRequest> if (hasIncrementalChunks(chunk)) { const merger = new DeepMerger(); for (const incremental of chunk.incremental) { - let { data, path, errors, extensions } = incremental; + const { path, errors, extensions } = incremental; + let data = + "items" in incremental ? incremental.items?.[0] + : "data" in incremental ? 
incremental.data + : undefined; + if (data && path) { for (let i = path.length - 1; i >= 0; --i) { const key = path[i]; From d6a051b4d9cb598bd091783fa371997430a25c86 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 9 Sep 2025 19:31:57 -0600 Subject: [PATCH 135/254] Fix more incorrect assertions --- .../__tests__/defer20220824/stream.test.ts | 44 +------------------ 1 file changed, 1 insertion(+), 43 deletions(-) diff --git a/src/incremental/handlers/__tests__/defer20220824/stream.test.ts b/src/incremental/handlers/__tests__/defer20220824/stream.test.ts index ca21dc4996a..3d8ca8294ca 100644 --- a/src/incremental/handlers/__tests__/defer20220824/stream.test.ts +++ b/src/incremental/handlers/__tests__/defer20220824/stream.test.ts @@ -193,32 +193,6 @@ describe("Execute: stream directive", () => { expect(request.hasNext).toBe(true); } - { - const { value: chunk, done } = await incoming.next(); - - assert(!done); - assert(handler.isIncrementalResult(chunk)); - expect(request.handle(undefined, chunk)).toStrictEqualTyped({ - data: { - scalarList: ["apple"], - }, - }); - expect(request.hasNext).toBe(true); - } - - { - const { value: chunk, done } = await incoming.next(); - - assert(!done); - assert(handler.isIncrementalResult(chunk)); - expect(request.handle(undefined, chunk)).toStrictEqualTyped({ - data: { - scalarList: ["apple", "banana"], - }, - }); - expect(request.hasNext).toBe(true); - } - { const { value: chunk, done } = await incoming.next(); @@ -330,22 +304,6 @@ describe("Execute: stream directive", () => { expect(request.hasNext).toBe(true); } - { - const { value: chunk, done } = await incoming.next(); - - assert(!done); - assert(handler.isIncrementalResult(chunk)); - expect(request.handle(undefined, chunk)).toStrictEqualTyped({ - data: { - scalarListList: [ - ["apple", "apple", "apple"], - ["banana", "banana", "banana"], - ], - }, - }); - expect(request.hasNext).toBe(true); - } - { const { value: chunk, done } = await incoming.next(); @@ -494,7 +452,7 @@ describe("Execute: stream directive", () => { ], }, }); - expect(request.hasNext).toBe(true); + expect(request.hasNext).toBe(false); } }); From 99bc51f647e93e057dceec6ba0879deaec81cabf Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 9 Sep 2025 19:34:19 -0600 Subject: [PATCH 136/254] Remove locations from errors in assertions --- .../handlers/__tests__/defer20220824/stream.test.ts | 4 ---- 1 file changed, 4 deletions(-) diff --git a/src/incremental/handlers/__tests__/defer20220824/stream.test.ts b/src/incremental/handlers/__tests__/defer20220824/stream.test.ts index 3d8ca8294ca..b650584725d 100644 --- a/src/incremental/handlers/__tests__/defer20220824/stream.test.ts +++ b/src/incremental/handlers/__tests__/defer20220824/stream.test.ts @@ -491,7 +491,6 @@ describe("Execute: stream directive", () => { errors: [ { message: "bad", - locations: [{ line: 3, column: 9 }], path: ["friendList", 1], }, ], @@ -515,7 +514,6 @@ describe("Execute: stream directive", () => { errors: [ { message: "bad", - locations: [{ line: 3, column: 9 }], path: ["friendList", 1], }, ], @@ -572,7 +570,6 @@ describe("Execute: stream directive", () => { errors: [ { message: "bad", - locations: [{ line: 3, column: 9 }], path: ["friendList", 1], }, ], @@ -596,7 +593,6 @@ describe("Execute: stream directive", () => { errors: [ { message: "bad", - locations: [{ line: 3, column: 9 }], path: ["friendList", 1], }, ], From 347eb206ee4379946653d782778497561e78ad0b Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 9 Sep 2025 19:39:57 -0600 Subject: [PATCH 
137/254] Handle merging null from stream --- src/incremental/handlers/defer20220824.ts | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/src/incremental/handlers/defer20220824.ts b/src/incremental/handlers/defer20220824.ts index 5dd0b9cea50..16ca682cd6a 100644 --- a/src/incremental/handlers/defer20220824.ts +++ b/src/incremental/handlers/defer20220824.ts @@ -104,11 +104,15 @@ class DeferRequest> for (const incremental of chunk.incremental) { const { path, errors, extensions } = incremental; let data = + // The item merged from a `@stream` chunk is always the first item in + // the `items` array "items" in incremental ? incremental.items?.[0] - : "data" in incremental ? incremental.data + // Ensure `data: null` isn't merged for `@defer` responses by + // falling back to `undefined` + : "data" in incremental ? incremental.data ?? undefined : undefined; - if (data && path) { + if (data !== undefined && path) { for (let i = path.length - 1; i >= 0; --i) { const key = path[i]; const isNumericKey = !isNaN(+key); From 0d271efc5000438c366f7ab0926af89d43fc4c52 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 9 Sep 2025 19:41:13 -0600 Subject: [PATCH 138/254] Fix more incorrect assertions --- .../__tests__/defer20220824/stream.test.ts | 41 ------------------- 1 file changed, 41 deletions(-) diff --git a/src/incremental/handlers/__tests__/defer20220824/stream.test.ts b/src/incremental/handlers/__tests__/defer20220824/stream.test.ts index b650584725d..485c8a69b69 100644 --- a/src/incremental/handlers/__tests__/defer20220824/stream.test.ts +++ b/src/incremental/handlers/__tests__/defer20220824/stream.test.ts @@ -558,25 +558,6 @@ describe("Execute: stream directive", () => { expect(request.hasNext).toBe(true); } - { - const { value: chunk, done } = await incoming.next(); - - assert(!done); - assert(handler.isIncrementalResult(chunk)); - expect(request.handle(undefined, chunk)).toStrictEqualTyped({ - data: { - friendList: [{ name: "Luke", id: "1" }, null], - }, - errors: [ - { - message: "bad", - path: ["friendList", 1], - }, - ], - }); - expect(request.hasNext).toBe(true); - } - { const { value: chunk, done } = await incoming.next(); @@ -1131,28 +1112,6 @@ describe("Execute: stream directive", () => { expect(request.hasNext).toBe(true); } - { - const { value: chunk, done } = await incoming.next(); - - assert(!done); - assert(handler.isIncrementalResult(chunk)); - expect(request.handle(undefined, chunk)).toStrictEqualTyped({ - data: { - otherNestedObject: { - scalarField: null, - }, - nestedObject: { nestedFriendList: [{ name: "Luke" }] }, - }, - errors: [ - { - message: "Oops", - path: ["otherNestedObject", "scalarField"], - }, - ], - }); - expect(request.hasNext).toBe(true); - } - { const { value: chunk, done } = await incoming.next(); From 7d1cc64db1f3669189ecde99dfd1b770329c8113 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 9 Sep 2025 19:44:38 -0600 Subject: [PATCH 139/254] Initialize merger on class initialization. 
Rename to merge --- src/incremental/handlers/defer20220824.ts | 25 ++++++++--------------- 1 file changed, 9 insertions(+), 16 deletions(-) diff --git a/src/incremental/handlers/defer20220824.ts b/src/incremental/handlers/defer20220824.ts index 16ca682cd6a..07a9fcb7888 100644 --- a/src/incremental/handlers/defer20220824.ts +++ b/src/incremental/handlers/defer20220824.ts @@ -74,13 +74,11 @@ class DeferRequest> private errors: Array = []; private extensions: Record = {}; private data: any = {}; + private merger = new DeepMerger(); - private mergeIn( - normalized: FormattedExecutionResult, - merger: DeepMerger - ) { + private merge(normalized: FormattedExecutionResult) { if (normalized.data !== undefined) { - this.data = merger.merge(this.data, normalized.data); + this.data = this.merger.merge(this.data, normalized.data); } if (normalized.errors) { this.errors.push(...normalized.errors); @@ -96,11 +94,9 @@ class DeferRequest> ): FormattedExecutionResult { this.hasNext = chunk.hasNext; this.data = cacheData; - - this.mergeIn(chunk, new DeepMerger()); + this.merge(chunk); if (hasIncrementalChunks(chunk)) { - const merger = new DeepMerger(); for (const incremental of chunk.incremental) { const { path, errors, extensions } = incremental; let data = @@ -121,14 +117,11 @@ class DeferRequest> data = parent as typeof data; } } - this.mergeIn( - { - errors, - extensions, - data: data ? (data as TData) : undefined, - }, - merger - ); + this.merge({ + errors, + extensions, + data: data ? (data as TData) : undefined, + }); } } From 85a1dcca9dfec161c4d2982eb2184419c8af4e46 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 9 Sep 2025 19:49:11 -0600 Subject: [PATCH 140/254] Add test file for client.watchQuery with stream on old format --- .../streamDefer20220824.test.ts | 971 ++++++++++++++++++ 1 file changed, 971 insertions(+) create mode 100644 src/core/__tests__/client.watchQuery/streamDefer20220824.test.ts diff --git a/src/core/__tests__/client.watchQuery/streamDefer20220824.test.ts b/src/core/__tests__/client.watchQuery/streamDefer20220824.test.ts new file mode 100644 index 00000000000..ea598575307 --- /dev/null +++ b/src/core/__tests__/client.watchQuery/streamDefer20220824.test.ts @@ -0,0 +1,971 @@ +import type { + FormattedExecutionResult, + FormattedInitialIncrementalExecutionResult, + FormattedSubsequentIncrementalExecutionResult, +} from "graphql-17-alpha2"; +import { + experimentalExecuteIncrementally, + GraphQLID, + GraphQLList, + GraphQLNonNull, + GraphQLObjectType, + GraphQLSchema, + GraphQLString, +} from "graphql-17-alpha2"; +import { from } from "rxjs"; + +import type { DocumentNode } from "@apollo/client"; +import { + ApolloClient, + ApolloLink, + CombinedGraphQLErrors, + gql, + InMemoryCache, + NetworkStatus, +} from "@apollo/client"; +import { Defer20220824Handler } from "@apollo/client/incremental"; +import { + markAsStreaming, + mockDefer20220824, + ObservableStream, + promiseWithResolvers, +} from "@apollo/client/testing/internal"; + +const friendType = new GraphQLObjectType({ + fields: { + id: { type: GraphQLID }, + name: { type: GraphQLString }, + nonNullName: { type: new GraphQLNonNull(GraphQLString) }, + }, + name: "Friend", +}); + +const friends = [ + { name: "Luke", id: 1 }, + { name: "Han", id: 2 }, + { name: "Leia", id: 3 }, +]; + +const query = new GraphQLObjectType({ + fields: { + scalarList: { + type: new GraphQLList(GraphQLString), + }, + scalarListList: { + type: new GraphQLList(new GraphQLList(GraphQLString)), + }, + friendList: { + type: new 
GraphQLList(friendType), + }, + nonNullFriendList: { + type: new GraphQLList(new GraphQLNonNull(friendType)), + }, + nestedObject: { + type: new GraphQLObjectType({ + name: "NestedObject", + fields: { + scalarField: { + type: GraphQLString, + }, + nonNullScalarField: { + type: new GraphQLNonNull(GraphQLString), + }, + nestedFriendList: { type: new GraphQLList(friendType) }, + deeperNestedObject: { + type: new GraphQLObjectType({ + name: "DeeperNestedObject", + fields: { + nonNullScalarField: { + type: new GraphQLNonNull(GraphQLString), + }, + deeperNestedFriendList: { type: new GraphQLList(friendType) }, + }, + }), + }, + }, + }), + }, + }, + name: "Query", +}); + +const schema = new GraphQLSchema({ query }); + +async function* run( + document: DocumentNode, + rootValue: unknown = {} +): AsyncGenerator< + | FormattedInitialIncrementalExecutionResult + | FormattedSubsequentIncrementalExecutionResult + | FormattedExecutionResult, + void +> { + const result = await experimentalExecuteIncrementally({ + schema, + document, + rootValue, + }); + + if ("initialResult" in result) { + yield JSON.parse(JSON.stringify(result.initialResult)); + + for await (const patch of result.subsequentResults) { + yield JSON.parse(JSON.stringify(patch)); + } + } else { + yield JSON.parse(JSON.stringify(result)); + } +} + +function createLink(rootValue?: Record) { + return new ApolloLink((operation) => { + return from(run(operation.query, rootValue)); + }); +} + +test("handles streamed scalar lists", async () => { + const client = new ApolloClient({ + link: createLink({ scalarList: ["apple", "banana", "orange"] }), + cache: new InMemoryCache(), + incrementalHandler: new Defer20220824Handler(), + }); + + const query = gql` + query ScalarListQuery { + scalarList @stream(initialCount: 1) + } + `; + + const observableStream = new ObservableStream(client.watchQuery({ query })); + + await expect(observableStream).toEmitTypedValue({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + data: markAsStreaming({ + scalarList: ["apple"], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + data: { + scalarList: ["apple", "banana", "orange"], + }, + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); +}); + +test("handles streamed multi-dimensional lists", async () => { + const client = new ApolloClient({ + link: createLink({ + scalarListList: [ + ["apple", "apple", "apple"], + ["banana", "banana", "banana"], + ["coconut", "coconut", "coconut"], + ], + }), + cache: new InMemoryCache(), + incrementalHandler: new Defer20220824Handler(), + }); + + const query = gql` + query ScalarListQuery { + scalarListList @stream(initialCount: 1) + } + `; + + const observableStream = new ObservableStream(client.watchQuery({ query })); + + await expect(observableStream).toEmitTypedValue({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + data: markAsStreaming({ + scalarListList: [["apple", "apple", "apple"]], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + data: { + scalarListList: [ + ["apple", "apple", "apple"], + 
["banana", "banana", "banana"], + ["coconut", "coconut", "coconut"], + ], + }, + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); +}); + +test("merges cache updates that happen concurrently", async () => { + const stream = mockDefer20220824(); + const client = new ApolloClient({ + link: stream.httpLink, + cache: new InMemoryCache(), + incrementalHandler: new Defer20220824Handler(), + }); + + const query = gql` + query FriendListQuery { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + const observableStream = new ObservableStream(client.watchQuery({ query })); + + await expect(observableStream).toEmitTypedValue({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + partial: true, + }); + + stream.enqueueInitialChunk({ + data: { + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }, + hasNext: true, + }); + + await expect(observableStream).toEmitTypedValue({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + client.cache.writeFragment({ + id: "Friend:1", + fragment: gql` + fragment FriendName on Friend { + name + } + `, + data: { + name: "Jedi", + }, + }); + + stream.enqueueSubsequentChunk({ + incremental: [ + { + items: [{ __typename: "Friend", id: "2", name: "Han" }] as any, + path: ["friendList", 1], + }, + { + items: [{ __typename: "Friend", id: "3", name: "Leia" }] as any, + path: ["friendList", 2], + }, + ], + hasNext: false, + }); + + await expect(observableStream).toEmitTypedValue({ + data: { + friendList: [ + { + __typename: "Friend", + id: "1", + name: "Jedi", // updated from cache + }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); +}); + +test("handles errors from items before initialCount is reached", async () => { + const client = new ApolloClient({ + link: createLink({ + friendList: () => + friends.map((friend, i) => { + if (i === 1) { + return Promise.reject(new Error("bad")); + } + + return Promise.resolve(friend); + }), + }), + cache: new InMemoryCache(), + incrementalHandler: new Defer20220824Handler(), + }); + + const query = gql` + query FriendListQuery { + friendList @stream(initialCount: 2) { + id + name + } + } + `; + + const observableStream = new ObservableStream( + client.watchQuery({ query, errorPolicy: "all" }) + ); + + await expect(observableStream).toEmitTypedValue({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }, null], + }), + error: new CombinedGraphQLErrors({ + data: { + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }, null], + }, + errors: [{ message: "bad", path: ["friendList", 1] }], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + null, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }), + error: new CombinedGraphQLErrors({ + data: { + friendList: [ + { 
__typename: "Friend", id: "1", name: "Luke" }, + null, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + errors: [{ message: "bad", path: ["friendList", 1] }], + }), + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.error, + partial: false, + }); + + await expect(observableStream).not.toEmitAnything(); +}); + +test("handles errors from items after initialCount is reached", async () => { + const client = new ApolloClient({ + link: createLink({ + friendList: () => + friends.map((friend, i) => { + if (i === 1) { + return Promise.reject(new Error("bad")); + } + + return Promise.resolve(friend); + }), + }), + cache: new InMemoryCache(), + incrementalHandler: new Defer20220824Handler(), + }); + + const query = gql` + query FriendListQuery { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + const observableStream = new ObservableStream( + client.watchQuery({ query, errorPolicy: "all" }) + ); + + await expect(observableStream).toEmitTypedValue({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }, null], + }), + error: new CombinedGraphQLErrors({ + data: { + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }, null], + }, + errors: [{ message: "bad", path: ["friendList", 1] }], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + null, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }), + error: new CombinedGraphQLErrors({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + null, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + errors: [{ message: "bad", path: ["friendList", 1] }], + }), + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.error, + partial: false, + }); + + await expect(observableStream).not.toEmitAnything(); +}); + +test("handles final chunk without incremental value", async () => { + const client = new ApolloClient({ + link: createLink({ + async *friendList() { + yield await Promise.resolve(friends[0]); + yield await Promise.resolve(friends[1]); + yield await Promise.resolve(friends[2]); + }, + }), + cache: new InMemoryCache(), + incrementalHandler: new Defer20220824Handler(), + }); + + const query = gql` + query FriendListQuery { + friendList @stream { + id + name + } + } + `; + + const observableStream = new ObservableStream(client.watchQuery({ query })); + + await expect(observableStream).toEmitTypedValue({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + data: markAsStreaming({ + friendList: [], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" 
}], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + ], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + await expect(observableStream).toEmitSimilarValue({ + expected: (previous) => ({ + ...previous, + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }), + }); + + await expect(observableStream).not.toEmitAnything(); +}); + +test("handles errors thrown before initialCount is reached", async () => { + const client = new ApolloClient({ + link: createLink({ + async *friendList() { + yield await Promise.resolve(friends[0]); + throw new Error("bad"); + }, + }), + cache: new InMemoryCache(), + incrementalHandler: new Defer20220824Handler(), + }); + + const query = gql` + query FriendListQuery { + friendList @stream(initialCount: 2) { + id + name + } + } + `; + + const observableStream = new ObservableStream( + client.watchQuery({ query, errorPolicy: "all" }) + ); + + await expect(observableStream).toEmitTypedValue({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + data: { + friendList: null, + }, + error: new CombinedGraphQLErrors({ + data: { friendList: null }, + errors: [ + { + message: "bad", + path: ["friendList"], + }, + ], + }), + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.error, + partial: false, + }); + + await expect(observableStream).not.toEmitAnything(); +}); + +test("handles errors thrown after initialCount is reached", async () => { + const client = new ApolloClient({ + link: createLink({ + async *friendList() { + yield await Promise.resolve(friends[0]); + throw new Error("bad"); + }, + }), + cache: new InMemoryCache(), + incrementalHandler: new Defer20220824Handler(), + }); + + const query = gql` + query FriendListQuery { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + const observableStream = new ObservableStream( + client.watchQuery({ query, errorPolicy: "all" }) + ); + + await expect(observableStream).toEmitTypedValue({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + data: { + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }, + error: new CombinedGraphQLErrors({ + data: { friendList: [{ __typename: "Friend", id: "1", name: "Luke" }] }, + errors: [ + { + message: "bad", + path: ["friendList"], + }, + ], + }), + dataState: "complete", + loading: false, + networkStatus: 
NetworkStatus.error, + partial: false, + }); + + await expect(observableStream).not.toEmitAnything(); +}); + +it("handles errors thrown due to null returned in non-null list items after initialCount is reached", async () => { + const client = new ApolloClient({ + link: createLink({ + nonNullFriendList: () => [friends[0], null, friends[1]], + }), + cache: new InMemoryCache(), + incrementalHandler: new Defer20220824Handler(), + }); + + const query = gql` + query { + nonNullFriendList @stream(initialCount: 1) { + id + name + } + } + `; + + const observableStream = new ObservableStream( + client.watchQuery({ query, errorPolicy: "all" }) + ); + + await expect(observableStream).toEmitTypedValue({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + data: markAsStreaming({ + nonNullFriendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + data: { + nonNullFriendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }, + error: new CombinedGraphQLErrors({ + data: { + nonNullFriendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }, + errors: [ + { + message: + "Cannot return null for non-nullable field Query.nonNullFriendList.", + path: ["nonNullFriendList", 1], + }, + ], + }), + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.error, + partial: false, + }); + + await expect(observableStream).not.toEmitAnything(); +}); + +it("handles stream when in parent deferred fragment", async () => { + const { promise: slowFieldPromise, resolve: resolveSlowField } = + promiseWithResolvers(); + + const client = new ApolloClient({ + link: createLink({ + nestedObject: { + scalarField: () => slowFieldPromise, + async *nestedFriendList() { + yield await Promise.resolve(friends[0]); + yield await Promise.resolve(friends[1]); + }, + }, + }), + cache: new InMemoryCache(), + incrementalHandler: new Defer20220824Handler(), + }); + + const query = gql` + query { + nestedObject { + ...DeferFragment @defer + } + } + fragment DeferFragment on NestedObject { + scalarField + nestedFriendList @stream(initialCount: 0) { + id + name + } + } + `; + + const observableStream = new ObservableStream(client.watchQuery({ query })); + + await expect(observableStream).toEmitTypedValue({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + data: markAsStreaming({ + nestedObject: { + __typename: "NestedObject", + }, + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + resolveSlowField("slow"); + + await expect(observableStream).toEmitTypedValue({ + data: markAsStreaming({ + nestedObject: { + __typename: "NestedObject", + scalarField: "slow", + nestedFriendList: [], + }, + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + data: markAsStreaming({ + nestedObject: { + __typename: "NestedObject", + scalarField: "slow", + nestedFriendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }, + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + 
await expect(observableStream).toEmitTypedValue({ + data: markAsStreaming({ + nestedObject: { + __typename: "NestedObject", + scalarField: "slow", + nestedFriendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + ], + }, + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + await expect(observableStream).toEmitSimilarValue({ + expected: (previous) => ({ + ...previous, + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }), + }); + + await expect(observableStream).not.toEmitAnything(); +}); + +test("handles @defer inside @stream", async () => { + const { promise: slowFieldPromise, resolve: resolveSlowField } = + promiseWithResolvers(); + const { + promise: iterableCompletionPromise, + resolve: resolveIterableCompletion, + } = promiseWithResolvers(); + + const client = new ApolloClient({ + link: createLink({ + async *friendList() { + yield await Promise.resolve(friends[0]); + yield await Promise.resolve({ + id: friends[1].id, + name: () => slowFieldPromise, + }); + await iterableCompletionPromise; + }, + }), + cache: new InMemoryCache(), + incrementalHandler: new Defer20220824Handler(), + }); + + const query = gql` + query { + friendList @stream { + ...NameFragment @defer + id + } + } + fragment NameFragment on Friend { + name + } + `; + + const observableStream = new ObservableStream(client.watchQuery({ query })); + + await expect(observableStream).toEmitTypedValue({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + data: markAsStreaming({ + friendList: [], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + resolveIterableCompletion(null); + + await expect(observableStream).toEmitSimilarValue({ + expected: (previous) => ({ + ...previous, + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }), + dataState: "streaming", + }), + }); + + resolveSlowField("Han"); + + await expect(observableStream).toEmitSimilarValue({ + expected: (previous) => ({ + ...previous, + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2" }, + ], + }), + dataState: "streaming", + }), + }); + + await expect(observableStream).toEmitTypedValue({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + ], + }, + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); + + await expect(observableStream).not.toEmitAnything(); +}); From 73d7da26ad442891db5ebfab5953a21061135b10 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 9 Sep 2025 22:03:24 -0600 Subject: [PATCH 141/254] Set test to failing to determine what we should do later --- .../streamDefer20220824.test.ts | 117 ++++++++++-------- 1 file changed, 63 insertions(+), 54 deletions(-) diff --git a/src/core/__tests__/client.watchQuery/streamDefer20220824.test.ts b/src/core/__tests__/client.watchQuery/streamDefer20220824.test.ts index ea598575307..fd19b925fd4 100644 --- a/src/core/__tests__/client.watchQuery/streamDefer20220824.test.ts +++ b/src/core/__tests__/client.watchQuery/streamDefer20220824.test.ts @@ -692,70 +692,79 @@ test("handles errors thrown after initialCount is 
reached", async () => { await expect(observableStream).not.toEmitAnything(); }); -it("handles errors thrown due to null returned in non-null list items after initialCount is reached", async () => { - const client = new ApolloClient({ - link: createLink({ - nonNullFriendList: () => [friends[0], null, friends[1]], - }), - cache: new InMemoryCache(), - incrementalHandler: new Defer20220824Handler(), - }); - - const query = gql` - query { - nonNullFriendList @stream(initialCount: 1) { - id - name +// TODO: Determine how to handle this case. This emits an error for the item at +// index 1 because it is non-null, but also emits the friend at index 2 to add +// to the array. This leaves us in a bit of an impossible state as +// we can't really set nonNullFriendList[1] to `null`, otherwise we violate the +// schema. Should we stop processing results if we recieve an `items: null` from +// the server indicating an error was thrown to the nearest boundary? +it.failing( + "handles errors thrown due to null returned in non-null list items after initialCount is reached", + async () => { + const client = new ApolloClient({ + link: createLink({ + nonNullFriendList: () => [friends[0], null, friends[1]], + }), + cache: new InMemoryCache(), + incrementalHandler: new Defer20220824Handler(), + }); + + const query = gql` + query { + nonNullFriendList @stream(initialCount: 1) { + id + name + } } - } - `; + `; - const observableStream = new ObservableStream( - client.watchQuery({ query, errorPolicy: "all" }) - ); + const observableStream = new ObservableStream( + client.watchQuery({ query, errorPolicy: "all" }) + ); - await expect(observableStream).toEmitTypedValue({ - data: undefined, - dataState: "empty", - loading: true, - networkStatus: NetworkStatus.loading, - partial: true, - }); + await expect(observableStream).toEmitTypedValue({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + partial: true, + }); - await expect(observableStream).toEmitTypedValue({ - data: markAsStreaming({ - nonNullFriendList: [{ __typename: "Friend", id: "1", name: "Luke" }], - }), - dataState: "streaming", - loading: true, - networkStatus: NetworkStatus.streaming, - partial: true, - }); + await expect(observableStream).toEmitTypedValue({ + data: markAsStreaming({ + nonNullFriendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + partial: true, + }); - await expect(observableStream).toEmitTypedValue({ - data: { - nonNullFriendList: [{ __typename: "Friend", id: "1", name: "Luke" }], - }, - error: new CombinedGraphQLErrors({ + await expect(observableStream).toEmitTypedValue({ data: { nonNullFriendList: [{ __typename: "Friend", id: "1", name: "Luke" }], }, - errors: [ - { - message: - "Cannot return null for non-nullable field Query.nonNullFriendList.", - path: ["nonNullFriendList", 1], + error: new CombinedGraphQLErrors({ + data: { + nonNullFriendList: [{ __typename: "Friend", id: "1", name: "Luke" }], }, - ], - }), - dataState: "complete", - loading: false, - networkStatus: NetworkStatus.error, - partial: false, - }); + errors: [ + { + message: + "Cannot return null for non-nullable field Query.nonNullFriendList.", + path: ["nonNullFriendList", 1], + }, + ], + }), + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.error, + partial: false, + }); - await expect(observableStream).not.toEmitAnything(); -}); + await expect(observableStream).not.toEmitAnything(); + } +); 
it("handles stream when in parent deferred fragment", async () => { const { promise: slowFieldPromise, resolve: resolveSlowField } = From 6271687310776bf404d0cf700a07e6bb9e38178a Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 9 Sep 2025 22:07:32 -0600 Subject: [PATCH 142/254] Update assertions based on behavior of old implementation --- .../streamDefer20220824.test.ts | 34 ++++++------------- 1 file changed, 11 insertions(+), 23 deletions(-) diff --git a/src/core/__tests__/client.watchQuery/streamDefer20220824.test.ts b/src/core/__tests__/client.watchQuery/streamDefer20220824.test.ts index fd19b925fd4..74564909312 100644 --- a/src/core/__tests__/client.watchQuery/streamDefer20220824.test.ts +++ b/src/core/__tests__/client.watchQuery/streamDefer20220824.test.ts @@ -437,22 +437,6 @@ test("handles errors from items after initialCount is reached", async () => { partial: true, }); - await expect(observableStream).toEmitTypedValue({ - data: markAsStreaming({ - friendList: [{ __typename: "Friend", id: "1", name: "Luke" }, null], - }), - error: new CombinedGraphQLErrors({ - data: { - friendList: [{ __typename: "Friend", id: "1", name: "Luke" }, null], - }, - errors: [{ message: "bad", path: ["friendList", 1] }], - }), - dataState: "streaming", - loading: true, - networkStatus: NetworkStatus.streaming, - partial: true, - }); - await expect(observableStream).toEmitTypedValue({ data: markAsStreaming({ friendList: [ @@ -607,14 +591,16 @@ test("handles errors thrown before initialCount is reached", async () => { await expect(observableStream).toEmitTypedValue({ data: { - friendList: null, + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }, null], }, error: new CombinedGraphQLErrors({ - data: { friendList: null }, + data: { + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }, null], + }, errors: [ { message: "bad", - path: ["friendList"], + path: ["friendList", 1], }, ], }), @@ -672,14 +658,16 @@ test("handles errors thrown after initialCount is reached", async () => { await expect(observableStream).toEmitTypedValue({ data: { - friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }, null], }, error: new CombinedGraphQLErrors({ - data: { friendList: [{ __typename: "Friend", id: "1", name: "Luke" }] }, + data: { + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }, null], + }, errors: [ { message: "bad", - path: ["friendList"], + path: ["friendList", 1], }, ], }), @@ -942,7 +930,7 @@ test("handles @defer inside @stream", async () => { expected: (previous) => ({ ...previous, data: markAsStreaming({ - friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + friendList: [{ __typename: "Friend", id: "1" }], }), dataState: "streaming", }), From 3ac5544c012b1b03279fc8698665dfd21f72f4e1 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 9 Sep 2025 22:33:12 -0600 Subject: [PATCH 143/254] Rerun api report --- .api-reports/api-report-incremental.api.md | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/.api-reports/api-report-incremental.api.md b/.api-reports/api-report-incremental.api.md index f5ce7d7230e..523c2a8b193 100644 --- a/.api-reports/api-report-incremental.api.md +++ b/.api-reports/api-report-incremental.api.md @@ -25,19 +25,30 @@ namespace Defer20220824Handler { return: Defer20220824Handler.Chunk>; } // (undocumented) - type IncrementalDeferPayload> = { - data?: TData | null | undefined; + type IncrementalDeferResult> = { + data?: TData | null; 
errors?: ReadonlyArray<GraphQLFormattedError>;
         extensions?: Record<string, unknown>;
         path?: Incremental.Path;
         label?: string;
     };
     // (undocumented)
+    type IncrementalResult<TData = Record<string, unknown>> = IncrementalDeferResult<TData> | IncrementalStreamResult<TData>;
+    // (undocumented)
+    type IncrementalStreamResult<TData = Record<string, unknown>> = {
+        errors?: ReadonlyArray<GraphQLFormattedError>;
+        items?: TData;
+        path?: Incremental.Path;
+        label?: string;
+        extensions?: Record<string, unknown>;
+    };
+    // (undocumented)
     type InitialResult<TData = Record<string, unknown>> = {
         data?: TData | null | undefined;
         errors?: ReadonlyArray<GraphQLFormattedError>;
         extensions?: Record<string, unknown>;
         hasNext: boolean;
+        incremental?: ReadonlyArray<IncrementalResult<TData>>;
     };
     // (undocumented)
     type SubsequentResult<TData = Record<string, unknown>> = {
@@ -45,7 +56,7 @@ namespace Defer20220824Handler {
         errors?: ReadonlyArray<GraphQLFormattedError>;
         extensions?: Record<string, unknown>;
         hasNext: boolean;
-        incremental?: Array<IncrementalDeferPayload<TData>>;
+        incremental?: Array<IncrementalResult<TData>>;
     };
     // (undocumented)
     interface TypeOverrides {

From 562e2191a4b38e05edb3da9074e2958db3c7b6b9 Mon Sep 17 00:00:00 2001
From: Jerel Miller <jerelmiller@gmail.com>
Date: Tue, 9 Sep 2025 22:48:01 -0600
Subject: [PATCH 144/254] Add changeset

---
 .changeset/six-islands-drum.md | 8 ++++++++
 1 file changed, 8 insertions(+)
 create mode 100644 .changeset/six-islands-drum.md

diff --git a/.changeset/six-islands-drum.md b/.changeset/six-islands-drum.md
new file mode 100644
index 00000000000..e540e2b375c
--- /dev/null
+++ b/.changeset/six-islands-drum.md
@@ -0,0 +1,8 @@
+---
+"@apollo/client": minor
+---
+
+Add support for the `@stream` directive on both the `Defer20220824Handler` and the `GraphQL17Alpha9Handler`.
+
+> [!NOTE]
+> The implementations of `@stream` differ in the delivery of incremental results between the different GraphQL spec versions. If you are upgrading from the older format to the newer format, expect the timing of some incremental results to change.

From 9dcbd37c1ca18e21f3211bc241456e5814d753fa Mon Sep 17 00:00:00 2001
From: Jerel Miller <jerelmiller@gmail.com>
Date: Tue, 9 Sep 2025 23:56:35 -0600
Subject: [PATCH 145/254] Update size limits

---
 .size-limits.json | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/.size-limits.json b/.size-limits.json
index e48d56978cf..e4f01b44776 100644
--- a/.size-limits.json
+++ b/.size-limits.json
@@ -1,6 +1,6 @@
 {
-  "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (CJS)": 44206,
-  "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production) (CJS)": 39060,
-  "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\"": 33462,
-  "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production)": 27490
+  "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (CJS)": 44194,
+  "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production) (CJS)": 39041,
+  "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\"": 33526,
+  "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production)": 27519
 }

From 3aa091cf29a61667e1cf1f9bcddd... (hash unchanged) Mon Sep 17 00:00:00 2001
From: Jerel Miller <jerelmiller@gmail.com>
Date: Tue, 9 Sep 2025 22:59:47 -0600
Subject: [PATCH 146/254] Ensure multipart/mixed header is set when using
 stream

---
 src/incremental/handlers/defer20220824.ts   |  2 +-
 src/incremental/handlers/graphql17Alpha9.ts |  2 +-
 src/link/http/__tests__/HttpLink.ts         | 78 +++++++++++++++++++++
 3 files changed, 80 insertions(+), 2 deletions(-)

diff --git a/src/incremental/handlers/defer20220824.ts b/src/incremental/handlers/defer20220824.ts
index 07a9fcb7888..378f92adc1b 100644
--- a/src/incremental/handlers/defer20220824.ts
+++ b/src/incremental/handlers/defer20220824.ts
@@ -177,7 +177,7 @@ export class 
Defer20220824Handler } prepareRequest(request: ApolloLink.Request): ApolloLink.Request { - if (hasDirectives(["defer"], request.query)) { + if (hasDirectives(["defer", "stream"], request.query)) { const context = request.context ?? {}; const http = (context.http ??= {}); http.accept = [ diff --git a/src/incremental/handlers/graphql17Alpha9.ts b/src/incremental/handlers/graphql17Alpha9.ts index 8062549bf15..58ba0b79cae 100644 --- a/src/incremental/handlers/graphql17Alpha9.ts +++ b/src/incremental/handlers/graphql17Alpha9.ts @@ -193,7 +193,7 @@ export class GraphQL17Alpha9Handler /** @internal */ prepareRequest(request: ApolloLink.Request): ApolloLink.Request { - if (hasDirectives(["defer"], request.query)) { + if (hasDirectives(["defer", "stream"], request.query)) { const context = request.context ?? {}; const http = (context.http ??= {}); http.accept = ["multipart/mixed", ...(http.accept || [])]; diff --git a/src/link/http/__tests__/HttpLink.ts b/src/link/http/__tests__/HttpLink.ts index a60975fd827..489ab4829f2 100644 --- a/src/link/http/__tests__/HttpLink.ts +++ b/src/link/http/__tests__/HttpLink.ts @@ -57,6 +57,15 @@ const sampleDeferredQuery = gql` } `; +const sampleStreamedQuery = gql` + query SampleDeferredQuery { + stubs @stream { + id + name + } + } +`; + const sampleQueryCustomDirective = gql` query SampleDeferredQuery { stub { @@ -1341,6 +1350,23 @@ describe("HttpLink", () => { "-----", ].join("\r\n"); + const streamBody = [ + "---", + "Content-Type: application/json; charset=utf-8", + "Content-Length: 43", + "", + '{"data":{"stubs":[]},"hasNext":true}', + "---", + "Content-Type: application/json; charset=utf-8", + "Content-Length: 58", + "", + // Intentionally using the boundary value `---` within the “name” to + // validate that boundary delimiters are not parsed within the response + // data itself, only read at the beginning of each chunk. + '{"hasNext":false, "incremental": [{"data":{"id":"1","name":"stubby---"},"path":["stubs", 1],"extensions":{"timestamp":1633038919}}]}', + "-----", + ].join("\r\n"); + const finalChunkOnlyHasNextFalse = [ "--graphql", "content-type: application/json", @@ -1524,6 +1550,58 @@ describe("HttpLink", () => { ); }); + it("sets correct accept header on request with streamed query", async () => { + const stream = ReadableStream.from( + streamBody.split("\r\n").map((line) => line + "\r\n") + ); + const fetch = jest.fn(async () => { + return new Response(stream, { + status: 200, + headers: { "content-type": "multipart/mixed" }, + }); + }); + + const { link, observableStream } = pipeLinkToObservableStream( + new HttpLink({ fetch }) + ); + + const client = new ApolloClient({ + link, + cache: new InMemoryCache(), + incrementalHandler: new Defer20220824Handler(), + }); + void client.query({ query: sampleStreamedQuery }); + + await expect(observableStream).toEmitTypedValue({ + data: { stubs: [] }, + hasNext: true, + }); + + await expect(observableStream).toEmitTypedValue({ + incremental: [ + { + data: { id: "1", name: "stubby---" }, + path: ["stubs", 1], + extensions: { timestamp: 1633038919 }, + }, + ], + hasNext: false, + }); + + await expect(observableStream).toComplete(); + + expect(fetch).toHaveBeenCalledWith( + "/graphql", + expect.objectContaining({ + headers: { + "content-type": "application/json", + accept: + "multipart/mixed;deferSpec=20220824,application/graphql-response+json,application/json;q=0.9", + }, + }) + ); + }); + // ensure that custom directives beginning with '@defer..' 
do not trigger // custom accept header for multipart responses it("sets does not set accept header on query with custom directive begging with @defer", async () => { From 802c7d98da4ed4c03f29e7a9cbfe07662d035332 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 9 Sep 2025 23:08:47 -0600 Subject: [PATCH 147/254] Add tests for alpha.9 in HttpLink --- src/link/http/__tests__/HttpLink.ts | 150 +++++++++++++++++++++++++++- 1 file changed, 149 insertions(+), 1 deletion(-) diff --git a/src/link/http/__tests__/HttpLink.ts b/src/link/http/__tests__/HttpLink.ts index 489ab4829f2..69792fab1c3 100644 --- a/src/link/http/__tests__/HttpLink.ts +++ b/src/link/http/__tests__/HttpLink.ts @@ -19,7 +19,10 @@ import { PROTOCOL_ERRORS_SYMBOL, ServerParseError, } from "@apollo/client/errors"; -import { Defer20220824Handler } from "@apollo/client/incremental"; +import { + Defer20220824Handler, + GraphQL17Alpha9Handler, +} from "@apollo/client/incremental"; import { ApolloLink } from "@apollo/client/link"; import { BaseHttpLink, HttpLink } from "@apollo/client/link/http"; import { @@ -1350,6 +1353,23 @@ describe("HttpLink", () => { "-----", ].join("\r\n"); + const bodyAlpha9 = [ + "---", + "Content-Type: application/json; charset=utf-8", + "Content-Length: 43", + "", + '{"data":{"stub":{"id":"0"}},"pending":[{"id":"0","path":["stub"]}],"hasNext":true}', + "---", + "Content-Type: application/json; charset=utf-8", + "Content-Length: 58", + "", + // Intentionally using the boundary value `---` within the “name” to + // validate that boundary delimiters are not parsed within the response + // data itself, only read at the beginning of each chunk. + '{"hasNext":false, "incremental": [{"data":{"name":"stubby---"},"id":"0","extensions":{"timestamp":1633038919}}]}', + "-----", + ].join("\r\n"); + const streamBody = [ "---", "Content-Type: application/json; charset=utf-8", @@ -1367,6 +1387,23 @@ describe("HttpLink", () => { "-----", ].join("\r\n"); + const streamBodyAlpha9 = [ + "---", + "Content-Type: application/json; charset=utf-8", + "Content-Length: 43", + "", + '{"data":{"stubs":[]},"pending": [{"id":"0","path":["stubs"]}], "hasNext":true}', + "---", + "Content-Type: application/json; charset=utf-8", + "Content-Length: 58", + "", + // Intentionally using the boundary value `---` within the “name” to + // validate that boundary delimiters are not parsed within the response + // data itself, only read at the beginning of each chunk. 
+ '{"hasNext":false, "incremental": [{"items":[{"id":"1","name":"stubby---"}],"id":"0","extensions":{"timestamp":1633038919}}],"completed":[{"id":"0"}]}', + "-----", + ].join("\r\n"); + const finalChunkOnlyHasNextFalse = [ "--graphql", "content-type: application/json", @@ -1550,6 +1587,61 @@ describe("HttpLink", () => { ); }); + it("sets correct accept header on request with deferred query using GraphQL17Alpha9Handler", async () => { + const stream = ReadableStream.from( + bodyAlpha9.split("\r\n").map((line) => line + "\r\n") + ); + const fetch = jest.fn(async () => { + return new Response(stream, { + status: 200, + headers: { "content-type": "multipart/mixed" }, + }); + }); + + const { link, observableStream } = pipeLinkToObservableStream( + new HttpLink({ fetch }) + ); + + const client = new ApolloClient({ + link, + cache: new InMemoryCache(), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + void client.query({ query: sampleDeferredQuery }); + + await expect(observableStream).toEmitTypedValue({ + data: { stub: { id: "0" } }, + // @ts-ignore + pending: [{ id: "0", path: ["stub"] }], + hasNext: true, + }); + + await expect(observableStream).toEmitTypedValue({ + incremental: [ + { + data: { name: "stubby---" }, + // @ts-ignore + id: "0", + extensions: { timestamp: 1633038919 }, + }, + ], + hasNext: false, + }); + + await expect(observableStream).toComplete(); + + expect(fetch).toHaveBeenCalledWith( + "/graphql", + expect.objectContaining({ + headers: { + "content-type": "application/json", + accept: + "multipart/mixed,application/graphql-response+json,application/json;q=0.9", + }, + }) + ); + }); + it("sets correct accept header on request with streamed query", async () => { const stream = ReadableStream.from( streamBody.split("\r\n").map((line) => line + "\r\n") @@ -1602,6 +1694,62 @@ describe("HttpLink", () => { ); }); + it("sets correct accept header on request with streamed query using GraphQL17Alpha9Handler", async () => { + const stream = ReadableStream.from( + streamBodyAlpha9.split("\r\n").map((line) => line + "\r\n") + ); + const fetch = jest.fn(async () => { + return new Response(stream, { + status: 200, + headers: { "content-type": "multipart/mixed" }, + }); + }); + + const { link, observableStream } = pipeLinkToObservableStream( + new HttpLink({ fetch }) + ); + + const client = new ApolloClient({ + link, + cache: new InMemoryCache(), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + void client.query({ query: sampleStreamedQuery }); + + await expect(observableStream).toEmitTypedValue({ + data: { stubs: [] }, + // @ts-ignore + pending: [{ id: "0", path: ["stubs"] }], + hasNext: true, + }); + + await expect(observableStream).toEmitTypedValue({ + incremental: [ + { + // @ts-ignore + items: [{ id: "1", name: "stubby---" }], + id: "0", + extensions: { timestamp: 1633038919 }, + }, + ], + completed: [{ id: "0" }], + hasNext: false, + }); + + await expect(observableStream).toComplete(); + + expect(fetch).toHaveBeenCalledWith( + "/graphql", + expect.objectContaining({ + headers: { + "content-type": "application/json", + accept: + "multipart/mixed,application/graphql-response+json,application/json;q=0.9", + }, + }) + ); + }); + // ensure that custom directives beginning with '@defer..' 
do not trigger // custom accept header for multipart responses it("sets does not set accept header on query with custom directive begging with @defer", async () => { From 86e0025f7ef7948ecf164d72c080beb23971c370 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 9 Sep 2025 23:12:47 -0600 Subject: [PATCH 148/254] Update exports snapshot --- src/__tests__/__snapshots__/exports.ts.snap | 1 + 1 file changed, 1 insertion(+) diff --git a/src/__tests__/__snapshots__/exports.ts.snap b/src/__tests__/__snapshots__/exports.ts.snap index 9207e712cdd..f043e84aad4 100644 --- a/src/__tests__/__snapshots__/exports.ts.snap +++ b/src/__tests__/__snapshots__/exports.ts.snap @@ -366,6 +366,7 @@ Array [ "mockDefer20220824", "mockDeferStreamGraphQL17Alpha9", "mockMultipartSubscriptionStream", + "promiseWithResolvers", "renderAsync", "renderHookAsync", "resetApolloContext", From c398a5549a311e255f4cbbd5b1ed452d41724692 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 9 Sep 2025 23:40:10 -0600 Subject: [PATCH 149/254] Always create a new DeepMerger --- src/incremental/handlers/defer20220824.ts | 23 ++++++++++++++--------- 1 file changed, 14 insertions(+), 9 deletions(-) diff --git a/src/incremental/handlers/defer20220824.ts b/src/incremental/handlers/defer20220824.ts index 378f92adc1b..bfa9a68d73b 100644 --- a/src/incremental/handlers/defer20220824.ts +++ b/src/incremental/handlers/defer20220824.ts @@ -74,11 +74,13 @@ class DeferRequest> private errors: Array = []; private extensions: Record = {}; private data: any = {}; - private merger = new DeepMerger(); - private merge(normalized: FormattedExecutionResult) { + private merge( + normalized: FormattedExecutionResult, + merger: DeepMerger + ) { if (normalized.data !== undefined) { - this.data = this.merger.merge(this.data, normalized.data); + this.data = merger.merge(this.data, normalized.data); } if (normalized.errors) { this.errors.push(...normalized.errors); @@ -94,7 +96,7 @@ class DeferRequest> ): FormattedExecutionResult { this.hasNext = chunk.hasNext; this.data = cacheData; - this.merge(chunk); + this.merge(chunk, new DeepMerger()); if (hasIncrementalChunks(chunk)) { for (const incremental of chunk.incremental) { @@ -117,11 +119,14 @@ class DeferRequest> data = parent as typeof data; } } - this.merge({ - errors, - extensions, - data: data ? (data as TData) : undefined, - }); + this.merge( + { + errors, + extensions, + data: data ? 
(data as TData) : undefined, + }, + new DeepMerger() + ); } } From b204d224d5d50dd53530e69dc511402bcf856fdb Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Wed, 10 Sep 2025 14:21:36 -0600 Subject: [PATCH 150/254] Add test helpers to execute schema incrementally --- .../executeSchemaGraphQL17Alpha2.ts | 36 +++++++++++++++++++ .../executeSchemaGraphQL17Alpha9.ts | 36 +++++++++++++++++++ src/testing/internal/index.ts | 2 ++ 3 files changed, 74 insertions(+) create mode 100644 src/testing/internal/incremental/executeSchemaGraphQL17Alpha2.ts create mode 100644 src/testing/internal/incremental/executeSchemaGraphQL17Alpha9.ts diff --git a/src/testing/internal/incremental/executeSchemaGraphQL17Alpha2.ts b/src/testing/internal/incremental/executeSchemaGraphQL17Alpha2.ts new file mode 100644 index 00000000000..eeba9cde67c --- /dev/null +++ b/src/testing/internal/incremental/executeSchemaGraphQL17Alpha2.ts @@ -0,0 +1,36 @@ +import type { + FormattedExecutionResult, + FormattedInitialIncrementalExecutionResult, + FormattedSubsequentIncrementalExecutionResult, + GraphQLSchema, +} from "graphql-17-alpha2"; +import { experimentalExecuteIncrementally } from "graphql-17-alpha2"; + +import type { DocumentNode } from "@apollo/client"; + +export async function* executeSchemaGraphQL17Alpha2( + schema: GraphQLSchema, + document: DocumentNode, + rootValue: unknown = {} +): AsyncGenerator< + | FormattedInitialIncrementalExecutionResult + | FormattedSubsequentIncrementalExecutionResult + | FormattedExecutionResult, + void +> { + const result = await experimentalExecuteIncrementally({ + schema, + document, + rootValue, + }); + + if ("initialResult" in result) { + yield JSON.parse(JSON.stringify(result.initialResult)); + + for await (const patch of result.subsequentResults) { + yield JSON.parse(JSON.stringify(patch)); + } + } else { + yield JSON.parse(JSON.stringify(result)); + } +} diff --git a/src/testing/internal/incremental/executeSchemaGraphQL17Alpha9.ts b/src/testing/internal/incremental/executeSchemaGraphQL17Alpha9.ts new file mode 100644 index 00000000000..ebeee5ebc47 --- /dev/null +++ b/src/testing/internal/incremental/executeSchemaGraphQL17Alpha9.ts @@ -0,0 +1,36 @@ +import type { + FormattedExecutionResult, + FormattedInitialIncrementalExecutionResult, + FormattedSubsequentIncrementalExecutionResult, + GraphQLSchema, +} from "graphql-17-alpha9"; +import { experimentalExecuteIncrementally } from "graphql-17-alpha9"; + +import type { DocumentNode } from "@apollo/client"; + +export async function* executeSchemaGraphQL17Alpha9( + schema: GraphQLSchema, + document: DocumentNode, + rootValue: unknown = {} +): AsyncGenerator< + | FormattedInitialIncrementalExecutionResult + | FormattedSubsequentIncrementalExecutionResult + | FormattedExecutionResult, + void +> { + const result = await experimentalExecuteIncrementally({ + schema, + document, + rootValue, + }); + + if ("initialResult" in result) { + yield JSON.parse(JSON.stringify(result.initialResult)); + + for await (const patch of result.subsequentResults) { + yield JSON.parse(JSON.stringify(patch)); + } + } else { + yield JSON.parse(JSON.stringify(result)); + } +} diff --git a/src/testing/internal/index.ts b/src/testing/internal/index.ts index 070e644ad14..f08e6b027bd 100644 --- a/src/testing/internal/index.ts +++ b/src/testing/internal/index.ts @@ -23,6 +23,8 @@ export { } from "./scenarios/index.js"; export { createClientWrapper, createMockWrapper } from "./renderHelpers.js"; export { actAsync } from "./rtl/actAsync.js"; +export { 
executeSchemaGraphQL17Alpha2 } from "./incremental/executeSchemaGraphQL17Alpha2.js"; +export { executeSchemaGraphQL17Alpha9 } from "./incremental/executeSchemaGraphQL17Alpha9.js"; export { promiseWithResolvers } from "./promiseWithResolvers.js"; export { renderAsync } from "./rtl/renderAsync.js"; export { renderHookAsync } from "./rtl/renderHookAsync.js"; From 2f620ef84b274e6d8708ee0152b85dc44211fcf1 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Wed, 10 Sep 2025 14:32:13 -0600 Subject: [PATCH 151/254] Add enableEarlyExecution option --- .../internal/incremental/executeSchemaGraphQL17Alpha9.ts | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/testing/internal/incremental/executeSchemaGraphQL17Alpha9.ts b/src/testing/internal/incremental/executeSchemaGraphQL17Alpha9.ts index ebeee5ebc47..8285297367b 100644 --- a/src/testing/internal/incremental/executeSchemaGraphQL17Alpha9.ts +++ b/src/testing/internal/incremental/executeSchemaGraphQL17Alpha9.ts @@ -11,7 +11,8 @@ import type { DocumentNode } from "@apollo/client"; export async function* executeSchemaGraphQL17Alpha9( schema: GraphQLSchema, document: DocumentNode, - rootValue: unknown = {} + rootValue: unknown = {}, + enableEarlyExecution?: boolean ): AsyncGenerator< | FormattedInitialIncrementalExecutionResult | FormattedSubsequentIncrementalExecutionResult @@ -22,6 +23,7 @@ export async function* executeSchemaGraphQL17Alpha9( schema, document, rootValue, + enableEarlyExecution, }); if ("initialResult" in result) { From 6b4156cc87f30b47e5e8b498f7cfdc830b9751fe Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Wed, 10 Sep 2025 14:33:46 -0600 Subject: [PATCH 152/254] Update existing tests to use new execute helpers --- .../streamDefer20220824.test.ts | 38 ++--------------- .../streamGraphQL17Alpha9.test.ts | 40 ++---------------- .../__tests__/defer20220824/defer.test.ts | 35 ++-------------- .../__tests__/defer20220824/stream.test.ts | 33 ++------------- .../__tests__/graphql17Alpha9/defer.test.ts | 42 ++++--------------- .../__tests__/graphql17Alpha9/stream.test.ts | 34 ++++----------- 6 files changed, 31 insertions(+), 191 deletions(-) diff --git a/src/core/__tests__/client.watchQuery/streamDefer20220824.test.ts b/src/core/__tests__/client.watchQuery/streamDefer20220824.test.ts index 74564909312..5c842c3db2b 100644 --- a/src/core/__tests__/client.watchQuery/streamDefer20220824.test.ts +++ b/src/core/__tests__/client.watchQuery/streamDefer20220824.test.ts @@ -1,10 +1,4 @@ -import type { - FormattedExecutionResult, - FormattedInitialIncrementalExecutionResult, - FormattedSubsequentIncrementalExecutionResult, -} from "graphql-17-alpha2"; import { - experimentalExecuteIncrementally, GraphQLID, GraphQLList, GraphQLNonNull, @@ -14,7 +8,6 @@ import { } from "graphql-17-alpha2"; import { from } from "rxjs"; -import type { DocumentNode } from "@apollo/client"; import { ApolloClient, ApolloLink, @@ -25,6 +18,7 @@ import { } from "@apollo/client"; import { Defer20220824Handler } from "@apollo/client/incremental"; import { + executeSchemaGraphQL17Alpha2, markAsStreaming, mockDefer20220824, ObservableStream, @@ -91,35 +85,11 @@ const query = new GraphQLObjectType({ const schema = new GraphQLSchema({ query }); -async function* run( - document: DocumentNode, - rootValue: unknown = {} -): AsyncGenerator< - | FormattedInitialIncrementalExecutionResult - | FormattedSubsequentIncrementalExecutionResult - | FormattedExecutionResult, - void -> { - const result = await experimentalExecuteIncrementally({ - schema, - document, - 
rootValue, - }); - - if ("initialResult" in result) { - yield JSON.parse(JSON.stringify(result.initialResult)); - - for await (const patch of result.subsequentResults) { - yield JSON.parse(JSON.stringify(patch)); - } - } else { - yield JSON.parse(JSON.stringify(result)); - } -} - function createLink(rootValue?: Record) { return new ApolloLink((operation) => { - return from(run(operation.query, rootValue)); + return from( + executeSchemaGraphQL17Alpha2(schema, operation.query, rootValue) + ); }); } diff --git a/src/core/__tests__/client.watchQuery/streamGraphQL17Alpha9.test.ts b/src/core/__tests__/client.watchQuery/streamGraphQL17Alpha9.test.ts index c414da5a7bc..c41861e597e 100644 --- a/src/core/__tests__/client.watchQuery/streamGraphQL17Alpha9.test.ts +++ b/src/core/__tests__/client.watchQuery/streamGraphQL17Alpha9.test.ts @@ -1,10 +1,4 @@ -import type { - FormattedExecutionResult, - FormattedInitialIncrementalExecutionResult, - FormattedSubsequentIncrementalExecutionResult, -} from "graphql-17-alpha9"; import { - experimentalExecuteIncrementally, GraphQLID, GraphQLList, GraphQLNonNull, @@ -14,7 +8,6 @@ import { } from "graphql-17-alpha9"; import { from } from "rxjs"; -import type { DocumentNode } from "@apollo/client"; import { ApolloClient, ApolloLink, @@ -25,6 +18,7 @@ import { } from "@apollo/client"; import { GraphQL17Alpha9Handler } from "@apollo/client/incremental"; import { + executeSchemaGraphQL17Alpha9, markAsStreaming, mockDeferStreamGraphQL17Alpha9, ObservableStream, @@ -91,37 +85,11 @@ const query = new GraphQLObjectType({ const schema = new GraphQLSchema({ query }); -async function* run( - document: DocumentNode, - rootValue: unknown = {}, - enableEarlyExecution = false -): AsyncGenerator< - | FormattedInitialIncrementalExecutionResult - | FormattedSubsequentIncrementalExecutionResult - | FormattedExecutionResult, - void -> { - const result = await experimentalExecuteIncrementally({ - schema, - document, - rootValue, - enableEarlyExecution, - }); - - if ("initialResult" in result) { - yield JSON.parse(JSON.stringify(result.initialResult)); - - for await (const patch of result.subsequentResults) { - yield JSON.parse(JSON.stringify(patch)); - } - } else { - yield JSON.parse(JSON.stringify(result)); - } -} - function createLink(rootValue?: Record) { return new ApolloLink((operation) => { - return from(run(operation.query, rootValue)); + return from( + executeSchemaGraphQL17Alpha9(schema, operation.query, rootValue) + ); }); } diff --git a/src/incremental/handlers/__tests__/defer20220824/defer.test.ts b/src/incremental/handlers/__tests__/defer20220824/defer.test.ts index 51f2eb9c874..5573423d24f 100644 --- a/src/incremental/handlers/__tests__/defer20220824/defer.test.ts +++ b/src/incremental/handlers/__tests__/defer20220824/defer.test.ts @@ -1,13 +1,6 @@ import assert from "node:assert"; -import type { - DocumentNode, - FormattedExecutionResult, - FormattedInitialIncrementalExecutionResult, - FormattedSubsequentIncrementalExecutionResult, -} from "graphql-17-alpha2"; import { - experimentalExecuteIncrementally, GraphQLID, GraphQLList, GraphQLNonNull, @@ -16,6 +9,7 @@ import { GraphQLString, } from "graphql-17-alpha2"; +import type { DocumentNode } from "@apollo/client"; import { ApolloClient, ApolloLink, @@ -27,6 +21,7 @@ import { } from "@apollo/client"; import { Defer20220824Handler } from "@apollo/client/incremental"; import { + executeSchemaGraphQL17Alpha2, markAsStreaming, mockDefer20220824, ObservableStream, @@ -105,30 +100,8 @@ function resolveOnNextTick(): Promise { 
return Promise.resolve(undefined); } -async function* run( - document: DocumentNode -): AsyncGenerator< - | FormattedInitialIncrementalExecutionResult - | FormattedSubsequentIncrementalExecutionResult, - FormattedExecutionResult | void -> { - const result = await experimentalExecuteIncrementally({ - schema, - document, - rootValue: {}, - }); - if ("initialResult" in result) { - yield JSON.parse( - JSON.stringify(result.initialResult) - ) as FormattedInitialIncrementalExecutionResult; - for await (const incremental of result.subsequentResults) { - yield JSON.parse( - JSON.stringify(incremental) - ) as FormattedSubsequentIncrementalExecutionResult; - } - } else { - return result; - } +function run(query: DocumentNode) { + return executeSchemaGraphQL17Alpha2(schema, query); } const schemaLink = new ApolloLink((operation) => { diff --git a/src/incremental/handlers/__tests__/defer20220824/stream.test.ts b/src/incremental/handlers/__tests__/defer20220824/stream.test.ts index 485c8a69b69..d560165c819 100644 --- a/src/incremental/handlers/__tests__/defer20220824/stream.test.ts +++ b/src/incremental/handlers/__tests__/defer20220824/stream.test.ts @@ -1,12 +1,6 @@ import assert from "node:assert"; -import type { - FormattedExecutionResult, - FormattedInitialIncrementalExecutionResult, - FormattedSubsequentIncrementalExecutionResult, -} from "graphql-17-alpha2"; import { - experimentalExecuteIncrementally, GraphQLID, GraphQLList, GraphQLNonNull, @@ -26,6 +20,7 @@ import { } from "@apollo/client"; import { Defer20220824Handler } from "@apollo/client/incremental"; import { + executeSchemaGraphQL17Alpha2, markAsStreaming, ObservableStream, promiseWithResolvers, @@ -94,30 +89,8 @@ const query = new GraphQLObjectType({ const schema = new GraphQLSchema({ query }); -async function* run( - document: DocumentNode, - rootValue: unknown = {} -): AsyncGenerator< - | FormattedInitialIncrementalExecutionResult - | FormattedSubsequentIncrementalExecutionResult - | FormattedExecutionResult, - void -> { - const result = await experimentalExecuteIncrementally({ - schema, - document, - rootValue, - }); - - if ("initialResult" in result) { - yield JSON.parse(JSON.stringify(result.initialResult)); - - for await (const patch of result.subsequentResults) { - yield JSON.parse(JSON.stringify(patch)); - } - } else { - yield JSON.parse(JSON.stringify(result)); - } +function run(document: DocumentNode, rootValue: unknown = {}) { + return executeSchemaGraphQL17Alpha2(schema, document, rootValue); } function createSchemaLink(rootValue?: Record) { diff --git a/src/incremental/handlers/__tests__/graphql17Alpha9/defer.test.ts b/src/incremental/handlers/__tests__/graphql17Alpha9/defer.test.ts index 06f47d063bc..96b493852bd 100644 --- a/src/incremental/handlers/__tests__/graphql17Alpha9/defer.test.ts +++ b/src/incremental/handlers/__tests__/graphql17Alpha9/defer.test.ts @@ -1,13 +1,6 @@ import assert from "node:assert"; -import type { - DocumentNode, - FormattedExecutionResult, - FormattedInitialIncrementalExecutionResult, - FormattedSubsequentIncrementalExecutionResult, -} from "graphql-17-alpha9"; import { - experimentalExecuteIncrementally, GraphQLID, GraphQLList, GraphQLNonNull, @@ -16,6 +9,7 @@ import { GraphQLString, } from "graphql-17-alpha9"; +import type { DocumentNode } from "@apollo/client"; import { ApolloClient, ApolloLink, @@ -27,6 +21,7 @@ import { } from "@apollo/client"; import { GraphQL17Alpha9Handler } from "@apollo/client/incremental"; import { + executeSchemaGraphQL17Alpha9, markAsStreaming, 
mockDeferStreamGraphQL17Alpha9, ObservableStream, @@ -154,36 +149,17 @@ function resolveOnNextTick(): Promise { return Promise.resolve(undefined); } -async function* run( +function run( document: DocumentNode, - rootValue: Record = { hero }, - enableEarlyExecution = false -): AsyncGenerator< - | FormattedInitialIncrementalExecutionResult - | FormattedSubsequentIncrementalExecutionResult - | FormattedExecutionResult, - void -> { - const result = await experimentalExecuteIncrementally({ + rootValue: unknown = {}, + enableEarlyExecution?: boolean +) { + return executeSchemaGraphQL17Alpha9( schema, document, rootValue, - enableEarlyExecution, - }); - - if ("initialResult" in result) { - yield JSON.parse( - JSON.stringify(result.initialResult) - ) as FormattedInitialIncrementalExecutionResult; - - for await (const incremental of result.subsequentResults) { - yield JSON.parse( - JSON.stringify(incremental) - ) as FormattedSubsequentIncrementalExecutionResult; - } - } else { - yield JSON.parse(JSON.stringify(result)) as FormattedExecutionResult; - } + enableEarlyExecution + ); } function createSchemaLink(rootValue?: Record) { diff --git a/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts b/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts index db3beac262b..9f2b40356ef 100644 --- a/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts +++ b/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts @@ -1,13 +1,6 @@ import assert from "node:assert"; -import type { - DocumentNode, - FormattedExecutionResult, - FormattedInitialIncrementalExecutionResult, - FormattedSubsequentIncrementalExecutionResult, -} from "graphql-17-alpha9"; import { - experimentalExecuteIncrementally, GraphQLID, GraphQLList, GraphQLNonNull, @@ -20,12 +13,14 @@ import { from } from "rxjs"; import { ApolloClient, ApolloLink, + DocumentNode, gql, InMemoryCache, NetworkStatus, } from "@apollo/client"; import { GraphQL17Alpha9Handler } from "@apollo/client/incremental"; import { + executeSchemaGraphQL17Alpha9, markAsStreaming, ObservableStream, promiseWithResolvers, @@ -98,32 +93,17 @@ function resolveOnNextTick(): Promise { return Promise.resolve(undefined); } -async function* run( +function run( document: DocumentNode, rootValue: unknown = {}, enableEarlyExecution = false -): AsyncGenerator< - | FormattedInitialIncrementalExecutionResult - | FormattedSubsequentIncrementalExecutionResult - | FormattedExecutionResult, - void -> { - const result = await experimentalExecuteIncrementally({ +) { + return executeSchemaGraphQL17Alpha9( schema, document, rootValue, - enableEarlyExecution, - }); - - if ("initialResult" in result) { - yield JSON.parse(JSON.stringify(result.initialResult)); - - for await (const patch of result.subsequentResults) { - yield JSON.parse(JSON.stringify(patch)); - } - } else { - yield JSON.parse(JSON.stringify(result)); - } + enableEarlyExecution + ); } function createSchemaLink(rootValue?: Record) { From 1d3f36c3a8a3480e585416538f4964bc6a0c2c9e Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Wed, 10 Sep 2025 14:36:43 -0600 Subject: [PATCH 153/254] Simplify link in tests --- .../handlers/__tests__/defer20220824/defer.test.ts | 11 ++--------- .../handlers/__tests__/graphql17Alpha9/defer.test.ts | 11 ++--------- 2 files changed, 4 insertions(+), 18 deletions(-) diff --git a/src/incremental/handlers/__tests__/defer20220824/defer.test.ts b/src/incremental/handlers/__tests__/defer20220824/defer.test.ts index 5573423d24f..2d416198617 100644 --- 
a/src/incremental/handlers/__tests__/defer20220824/defer.test.ts +++ b/src/incremental/handlers/__tests__/defer20220824/defer.test.ts @@ -8,6 +8,7 @@ import { GraphQLSchema, GraphQLString, } from "graphql-17-alpha2"; +import { from } from "rxjs"; import type { DocumentNode } from "@apollo/client"; import { @@ -17,7 +18,6 @@ import { gql, InMemoryCache, NetworkStatus, - Observable, } from "@apollo/client"; import { Defer20220824Handler } from "@apollo/client/incremental"; import { @@ -105,14 +105,7 @@ function run(query: DocumentNode) { } const schemaLink = new ApolloLink((operation) => { - return new Observable((observer) => { - void (async () => { - for await (const chunk of run(operation.query)) { - observer.next(chunk); - } - observer.complete(); - })(); - }); + return from(run(operation.query)); }); describe("graphql-js test cases", () => { diff --git a/src/incremental/handlers/__tests__/graphql17Alpha9/defer.test.ts b/src/incremental/handlers/__tests__/graphql17Alpha9/defer.test.ts index 96b493852bd..1d40bc7fc0a 100644 --- a/src/incremental/handlers/__tests__/graphql17Alpha9/defer.test.ts +++ b/src/incremental/handlers/__tests__/graphql17Alpha9/defer.test.ts @@ -8,6 +8,7 @@ import { GraphQLSchema, GraphQLString, } from "graphql-17-alpha9"; +import { from } from "rxjs"; import type { DocumentNode } from "@apollo/client"; import { @@ -17,7 +18,6 @@ import { gql, InMemoryCache, NetworkStatus, - Observable, } from "@apollo/client"; import { GraphQL17Alpha9Handler } from "@apollo/client/incremental"; import { @@ -164,14 +164,7 @@ function run( function createSchemaLink(rootValue?: Record) { return new ApolloLink((operation) => { - return new Observable((observer) => { - void (async () => { - for await (const chunk of run(operation.query, rootValue)) { - observer.next(chunk); - } - observer.complete(); - })(); - }); + return from(run(operation.query, rootValue)); }); } From 7d238ff3f63b6391d637a6767be18dd2795f736c Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Wed, 10 Sep 2025 14:55:37 -0600 Subject: [PATCH 154/254] Add schemas for the friend list --- src/testing/internal/index.ts | 3 + .../schemas/friendList.graphql17Alpha2.ts | 62 +++++++++++++++++++ .../schemas/friendList.graphql17Alpha9.ts | 62 +++++++++++++++++++ 3 files changed, 127 insertions(+) create mode 100644 src/testing/internal/schemas/friendList.graphql17Alpha2.ts create mode 100644 src/testing/internal/schemas/friendList.graphql17Alpha9.ts diff --git a/src/testing/internal/index.ts b/src/testing/internal/index.ts index f08e6b027bd..54301cd5c61 100644 --- a/src/testing/internal/index.ts +++ b/src/testing/internal/index.ts @@ -38,3 +38,6 @@ export { } from "./link.js"; export { markAsStreaming } from "./markAsStreaming.js"; export { wait } from "./wait.js"; + +export { friendListSchemaGraphQL17Alpha2 } from "./schemas/friendList.graphql17Alpha2.js"; +export { friendListSchemaGraphQL17Alpha9 } from "./schemas/friendList.graphql17Alpha9.js"; diff --git a/src/testing/internal/schemas/friendList.graphql17Alpha2.ts b/src/testing/internal/schemas/friendList.graphql17Alpha2.ts new file mode 100644 index 00000000000..07ab96da399 --- /dev/null +++ b/src/testing/internal/schemas/friendList.graphql17Alpha2.ts @@ -0,0 +1,62 @@ +import { + GraphQLID, + GraphQLList, + GraphQLNonNull, + GraphQLObjectType, + GraphQLSchema, + GraphQLString, +} from "graphql-17-alpha2"; + +const friendType = new GraphQLObjectType({ + fields: { + id: { type: GraphQLID }, + name: { type: GraphQLString }, + nonNullName: { type: new GraphQLNonNull(GraphQLString) 
}, + }, + name: "Friend", +}); + +const query = new GraphQLObjectType({ + fields: { + scalarList: { + type: new GraphQLList(GraphQLString), + }, + scalarListList: { + type: new GraphQLList(new GraphQLList(GraphQLString)), + }, + friendList: { + type: new GraphQLList(friendType), + }, + nonNullFriendList: { + type: new GraphQLList(new GraphQLNonNull(friendType)), + }, + nestedObject: { + type: new GraphQLObjectType({ + name: "NestedObject", + fields: { + scalarField: { + type: GraphQLString, + }, + nonNullScalarField: { + type: new GraphQLNonNull(GraphQLString), + }, + nestedFriendList: { type: new GraphQLList(friendType) }, + deeperNestedObject: { + type: new GraphQLObjectType({ + name: "DeeperNestedObject", + fields: { + nonNullScalarField: { + type: new GraphQLNonNull(GraphQLString), + }, + deeperNestedFriendList: { type: new GraphQLList(friendType) }, + }, + }), + }, + }, + }), + }, + }, + name: "Query", +}); + +export const friendListSchemaGraphQL17Alpha2 = new GraphQLSchema({ query }); diff --git a/src/testing/internal/schemas/friendList.graphql17Alpha9.ts b/src/testing/internal/schemas/friendList.graphql17Alpha9.ts new file mode 100644 index 00000000000..1cd844a5203 --- /dev/null +++ b/src/testing/internal/schemas/friendList.graphql17Alpha9.ts @@ -0,0 +1,62 @@ +import { + GraphQLID, + GraphQLList, + GraphQLNonNull, + GraphQLObjectType, + GraphQLSchema, + GraphQLString, +} from "graphql-17-alpha9"; + +const friendType = new GraphQLObjectType({ + fields: { + id: { type: GraphQLID }, + name: { type: GraphQLString }, + nonNullName: { type: new GraphQLNonNull(GraphQLString) }, + }, + name: "Friend", +}); + +const query = new GraphQLObjectType({ + fields: { + scalarList: { + type: new GraphQLList(GraphQLString), + }, + scalarListList: { + type: new GraphQLList(new GraphQLList(GraphQLString)), + }, + friendList: { + type: new GraphQLList(friendType), + }, + nonNullFriendList: { + type: new GraphQLList(new GraphQLNonNull(friendType)), + }, + nestedObject: { + type: new GraphQLObjectType({ + name: "NestedObject", + fields: { + scalarField: { + type: GraphQLString, + }, + nonNullScalarField: { + type: new GraphQLNonNull(GraphQLString), + }, + nestedFriendList: { type: new GraphQLList(friendType) }, + deeperNestedObject: { + type: new GraphQLObjectType({ + name: "DeeperNestedObject", + fields: { + nonNullScalarField: { + type: new GraphQLNonNull(GraphQLString), + }, + deeperNestedFriendList: { type: new GraphQLList(friendType) }, + }, + }), + }, + }, + }), + }, + }, + name: "Query", +}); + +export const friendListSchemaGraphQL17Alpha9 = new GraphQLSchema({ query }); From fc671ff0d0a9612cfffda8aa085ff142d6a27808 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Wed, 10 Sep 2025 15:42:02 -0600 Subject: [PATCH 155/254] Add helper to emit values in async iterable --- src/testing/internal/asyncIterableSubject.ts | 16 ++++++++++++++++ src/testing/internal/index.ts | 1 + 2 files changed, 17 insertions(+) create mode 100644 src/testing/internal/asyncIterableSubject.ts diff --git a/src/testing/internal/asyncIterableSubject.ts b/src/testing/internal/asyncIterableSubject.ts new file mode 100644 index 00000000000..9085dd149b4 --- /dev/null +++ b/src/testing/internal/asyncIterableSubject.ts @@ -0,0 +1,16 @@ +import { Subject } from "rxjs"; + +export function asyncIterableSubject() { + const subject = new Subject(); + + const stream = new ReadableStream({ + start: (controller) => { + subject.subscribe({ + next: (value) => controller.enqueue(value), + complete: () => controller.close(), + }); + }, + }); + + 
return { subject, stream }; +} diff --git a/src/testing/internal/index.ts b/src/testing/internal/index.ts index 54301cd5c61..de686fc955d 100644 --- a/src/testing/internal/index.ts +++ b/src/testing/internal/index.ts @@ -23,6 +23,7 @@ export { } from "./scenarios/index.js"; export { createClientWrapper, createMockWrapper } from "./renderHelpers.js"; export { actAsync } from "./rtl/actAsync.js"; +export { asyncIterableSubject } from "./asyncIterableSubject.js"; export { executeSchemaGraphQL17Alpha2 } from "./incremental/executeSchemaGraphQL17Alpha2.js"; export { executeSchemaGraphQL17Alpha9 } from "./incremental/executeSchemaGraphQL17Alpha9.js"; export { promiseWithResolvers } from "./promiseWithResolvers.js"; From 1e3e80cf079ae34caf0df986be05a0287fcf3eae Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Wed, 10 Sep 2025 17:52:50 -0600 Subject: [PATCH 156/254] Add offset arg to friendList --- src/testing/internal/schemas/friendList.graphql17Alpha9.ts | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/testing/internal/schemas/friendList.graphql17Alpha9.ts b/src/testing/internal/schemas/friendList.graphql17Alpha9.ts index 1cd844a5203..4f774afab13 100644 --- a/src/testing/internal/schemas/friendList.graphql17Alpha9.ts +++ b/src/testing/internal/schemas/friendList.graphql17Alpha9.ts @@ -1,5 +1,6 @@ import { GraphQLID, + GraphQLInt, GraphQLList, GraphQLNonNull, GraphQLObjectType, @@ -26,6 +27,11 @@ const query = new GraphQLObjectType({ }, friendList: { type: new GraphQLList(friendType), + args: { + offset: { + type: GraphQLInt, + }, + }, }, nonNullFriendList: { type: new GraphQLList(new GraphQLNonNull(friendType)), From c22394bd706bc5c7543bd7eb03f921ec4cc6f1d9 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Wed, 10 Sep 2025 18:21:17 -0600 Subject: [PATCH 157/254] Add dom.asyncIterable to tests --- tsconfig.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tsconfig.json b/tsconfig.json index 7bbdcf7fdcc..578691cbab1 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -18,7 +18,7 @@ "experimentalDecorators": true, "outDir": "./dist", "rootDir": "./src", - "lib": ["DOM", "ES2023"], + "lib": ["DOM", "dom.asyncIterable", "ES2023"], "types": [ "jest", "node", From 38b24c7132f2f8b97383fd9683e0a7b0a431853c Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Wed, 10 Sep 2025 19:17:53 -0600 Subject: [PATCH 158/254] Add tests for useSuspenseQuery with @stream --- .../streamGraphQL17Alpha9.test.tsx | 1688 +++++++++++++++++ 1 file changed, 1688 insertions(+) create mode 100644 src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx diff --git a/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx b/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx new file mode 100644 index 00000000000..39e93447e45 --- /dev/null +++ b/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx @@ -0,0 +1,1688 @@ +import type { RenderOptions } from "@testing-library/react"; +import { + createRenderStream, + disableActEnvironment, + useTrackRenders, +} from "@testing-library/react-render-stream"; +import React, { Suspense } from "react"; +import { ErrorBoundary } from "react-error-boundary"; +import type { Subject } from "rxjs"; +import { delay, from, throwError } from "rxjs"; + +import type { ErrorLike, OperationVariables } from "@apollo/client"; +import { + ApolloClient, + ApolloLink, + CombinedGraphQLErrors, + gql, + InMemoryCache, + NetworkStatus, +} from "@apollo/client"; +import { GraphQL17Alpha9Handler } from 
"@apollo/client/incremental"; +import { useSuspenseQuery } from "@apollo/client/react"; +import { + asyncIterableSubject, + createClientWrapper, + executeSchemaGraphQL17Alpha9, + friendListSchemaGraphQL17Alpha9, + markAsStreaming, + spyOnConsole, + wait, +} from "@apollo/client/testing/internal"; +import { offsetLimitPagination } from "@apollo/client/utilities"; +import { preventUnhandledRejection } from "@apollo/client/utilities/internal"; +import { invariant } from "@apollo/client/utilities/invariant"; + +async function renderSuspenseHook< + TData, + TVariables extends OperationVariables, + Props = never, +>( + renderHook: ( + props: Props extends never ? undefined : Props + ) => useSuspenseQuery.Result, + options: Pick & { initialProps?: Props } +) { + function UseSuspenseQuery({ props }: { props: Props | undefined }) { + useTrackRenders({ name: "useSuspenseQuery" }); + replaceSnapshot(renderHook(props as any)); + + return null; + } + + function SuspenseFallback() { + useTrackRenders({ name: "SuspenseFallback" }); + + return null; + } + + function ErrorFallback() { + useTrackRenders({ name: "ErrorBoundary" }); + + return null; + } + + function App({ props }: { props: Props | undefined }) { + return ( + }> + replaceSnapshot({ error })} + > + + + + ); + } + + const { render, takeRender, replaceSnapshot, getCurrentRender } = + createRenderStream< + useSuspenseQuery.Result | { error: ErrorLike } + >({ skipNonTrackingRenders: true }); + + const utils = await render(, options); + + function rerender(props: Props) { + return utils.rerender(); + } + + function getCurrentSnapshot() { + const { snapshot } = getCurrentRender(); + + invariant("data" in snapshot, "Snapshot is not a hook snapshot"); + + return snapshot; + } + + return { getCurrentSnapshot, takeRender, rerender }; +} + +function createLink(rootValue?: unknown) { + return new ApolloLink((operation) => { + return from( + executeSchemaGraphQL17Alpha9( + friendListSchemaGraphQL17Alpha9, + operation.query, + rootValue + ) + ); + }); +} + +const friends = [ + { name: "Luke", id: 1 }, + { name: "Han", id: 2 }, + { name: "Leia", id: 3 }, +]; + +type Friend = (typeof friends)[number]; + +test("suspends streamed queries until initial chunk loads then streams in data as it loads", async () => { + const { stream, subject } = asyncIterableSubject(); + + const query = gql` + query { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: createLink({ + friendList: async () => { + return stream; + }, + }), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + () => useSuspenseQuery(query), + { wrapper: createClientWrapper(client) } + ); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + subject.next(friends[0]); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + subject.next(friends[1]); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + 
expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + ], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + subject.next(friends[2]); + subject.complete(); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + await expect(takeRender).not.toRerender(); +}); + +test.each([ + "cache-first", + "network-only", + "no-cache", + "cache-and-network", +])( + 'suspends streamed queries until initial chunk loads then streams in data as it loads when using a "%s" fetch policy', + async (fetchPolicy) => { + const { stream, subject } = asyncIterableSubject(); + + const query = gql` + query { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: createLink({ friendList: () => stream }), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + () => useSuspenseQuery(query, { fetchPolicy }), + { wrapper: createClientWrapper(client) } + ); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + subject.next(friends[0]); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + subject.next(friends[1]); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + ], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + subject.next(friends[2]); + subject.complete(); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + await expect(takeRender).not.toRerender(); + } +); + +test('does not suspend streamed queries with data in the cache and using a "cache-first" fetch policy', async () => { + const query = gql` + query { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + const cache = new InMemoryCache(); + + cache.writeQuery({ + // Use a query without `@stream` to ensure it doesn't affect the cache + query: gql` + query { + 
friendList { + id + name + } + } + `, + data: { + friendList: friends.map((friend) => ({ + __typename: "Friend", + ...friend, + })), + }, + }); + + const client = new ApolloClient({ + cache, + link: ApolloLink.empty(), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + () => useSuspenseQuery(query, { fetchPolicy: "cache-first" }), + { + wrapper: createClientWrapper(client), + } + ); + + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: friends.map((friend) => ({ + __typename: "Friend", + ...friend, + })), + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + + await expect(takeRender).not.toRerender(); +}); + +test.failing( + 'does not suspend streamed queries with partial data in the cache and using a "cache-first" fetch policy with `returnPartialData`', + async () => { + const { subject, stream } = asyncIterableSubject(); + + const query = gql` + query { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + const cache = new InMemoryCache(); + + // We are intentionally writing partial data to the cache. Supress console + // warnings to avoid unnecessary noise in the test. + { + using _consoleSpy = spyOnConsole("error"); + cache.writeQuery({ + query, + data: { + friendList: friends.map((friend) => ({ + __typename: "Friend", + id: String(friend.id), + })), + }, + }); + } + + const client = new ApolloClient({ + cache, + link: createLink({ friendList: () => stream }), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + () => + useSuspenseQuery(query, { + fetchPolicy: "cache-first", + returnPartialData: true, + }), + { + wrapper: createClientWrapper(client), + } + ); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: friends.map((friend) => ({ + __typename: "Friend", + id: String(friend.id), + })), + }, + dataState: "partial", + networkStatus: NetworkStatus.loading, + error: undefined, + }); + } + + subject.next(friends[0]); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2" }, + { __typename: "Friend", id: "3" }, + ], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + subject.next(friends[0]); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3" }, + ], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + subject.next(friends[1]); + subject.complete(); + + { + const { snapshot, renderedComponents } = await takeRender(); + + 
expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }), + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + await expect(takeRender).not.toRerender(); + } +); + +test.failing( + 'does not suspend streamed queries with data in the cache and using a "cache-and-network" fetch policy', + async () => { + const { stream, subject } = asyncIterableSubject(); + const query = gql` + query { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: createLink({ friendList: () => stream }), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + client.writeQuery({ + query, + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Cached Luke" }, + { __typename: "Friend", id: "2", name: "Cached Han" }, + { __typename: "Friend", id: "3", name: "Cached Leia" }, + ], + }, + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + () => useSuspenseQuery(query, { fetchPolicy: "cache-and-network" }), + { wrapper: createClientWrapper(client) } + ); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Cached Luke" }, + { __typename: "Friend", id: "2", name: "Cached Han" }, + { __typename: "Friend", id: "3", name: "Cached Leia" }, + ], + }, + dataState: "complete", + networkStatus: NetworkStatus.loading, + error: undefined, + }); + } + + subject.next(friends[0]); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Cached Han" }, + { __typename: "Friend", id: "3", name: "Cached Leia" }, + ], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + subject.next(friends[1]); + subject.next(friends[2]); + subject.complete(); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + friends: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + await expect(takeRender).not.toRerender(); + } +); + +test.failing( + "incrementally rerenders data returned by a `refetch` for a streamed query", + async () => { + let subject!: Subject; + const query = gql` + query { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + const client = new ApolloClient({ + link: createLink({ + friendList: () => { + const result = asyncIterableSubject(); + subject = result.subject; + + return result.stream; + }, + }), + cache: new InMemoryCache(), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { 
takeRender, getCurrentSnapshot } = await renderSuspenseHook( + () => useSuspenseQuery(query), + { wrapper: createClientWrapper(client) } + ); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + subject.next(friends[0]); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + subject.next(friends[1]); + subject.next(friends[2]); + subject.complete(); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + const refetchPromise = getCurrentSnapshot().refetch(); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + subject.next({ id: 1, name: "Luke (refetch)" }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke (refetch)" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + subject.next({ id: 2, name: "Han (refetch)" }); + subject.next({ id: 3, name: "Leia (refetch)" }); + subject.complete(); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke (refetch)" }, + { __typename: "Friend", id: "2", name: "Han (refetch)" }, + { __typename: "Friend", id: "3", name: "Leia (refetch)" }, + ], + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + await expect(refetchPromise).resolves.toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke (refetch)" }, + { __typename: "Friend", id: "2", name: "Han (refetch)" }, + { __typename: "Friend", id: "3", name: "Leia (refetch)" }, + ], + }, + }); + } +); + +test("incrementally renders data returned after skipping a streamed query", async () => { + const { stream, subject } = asyncIterableSubject(); + const query = gql` + query { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + const client = new ApolloClient({ + link: createLink({ friendList: () => stream }), + cache: new InMemoryCache(), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + using __disabledAct = disableActEnvironment(); + const { takeRender, rerender } = await renderSuspenseHook( + ({ skip }) => useSuspenseQuery(query, { skip }), + { + initialProps: { skip: true }, + wrapper: createClientWrapper(client), + } + ); + + { + const { snapshot, 
renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: undefined, + dataState: "empty", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + await rerender({ skip: false }); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + subject.next(friends[0]); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + subject.next(friends[1]); + subject.next(friends[2]); + subject.complete(); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + await expect(takeRender).not.toRerender(); +}); + +// TODO: This test is a bit of a lie. `fetchMore` should incrementally +// rerender when using `@defer` but there is currently a bug in the core +// implementation that prevents updates until the final result is returned. +// This test reflects the behavior as it exists today, but will need +// to be updated once the core bug is fixed. +// +// NOTE: A duplicate it.failng test has been added right below this one with +// the expected behavior added in (i.e. the commented code in this test). Once +// the core bug is fixed, this test can be removed in favor of the other test. 
+// +// https://github.com/apollographql/apollo-client/issues/11034 +test.failing( + "rerenders data returned by `fetchMore` for a streamed query", + async () => { + let subject!: Subject; + const query = gql` + query ($offset: Int) { + friendList(offset: $offset) @stream(initialCount: 1) { + id + name + } + } + `; + + const cache = new InMemoryCache({ + typePolicies: { + Query: { + fields: { + greetings: offsetLimitPagination(), + }, + }, + }, + }); + + const client = new ApolloClient({ + link: createLink({ + friendList: () => { + const iterator = asyncIterableSubject(); + subject = iterator.subject; + + return iterator.stream; + }, + }), + cache, + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender, getCurrentSnapshot } = await renderSuspenseHook( + () => useSuspenseQuery(query, { variables: { offset: 0 } }), + { wrapper: createClientWrapper(client) } + ); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + subject.next(friends[0]); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + subject.next(friends[1]); + subject.complete(); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + ], + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + const fetchMorePromise = getCurrentSnapshot().fetchMore({ + variables: { offset: 2 }, + }); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + subject.next(friends[2]); + + // TODO: Re-enable once the core bug is fixed + // { + // const { snapshot, renderedComponents } = await takeRender(); + // + // expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + // expect(snapshot).toStrictEqualTyped({ + // data: markAsStreaming({ + // friendList: [ + // { __typename: "Friend", id: "1", name: "Luke" }, + // { __typename: "Friend", id: "2", name: "Han" }, + // { __typename: "Friend", id: "3", name: "Leia" }, + // ], + // }), + // dataState: "streaming", + // networkStatus: NetworkStatus.streaming, + // error: undefined, + // }); + // } + + await wait(0); + subject.next({ id: 4, name: "Chewbacca" }); + subject.complete(); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + { __typename: "Friend", id: "4", name: "Chewbacca" }, + ], + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + await expect(fetchMorePromise).resolves.toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "3", name: "Leia" }, + { __typename: "Friend", id: "4", 
name: "Chewbacca" }, + ], + }, + }); + + await expect(takeRender).not.toRerender(); + } +); + +// TODO: This is a duplicate of the test above, but with the expected behavior +// added (hence the `it.failing`). Remove the previous test once issue #11034 +// is fixed. +// +// https://github.com/apollographql/apollo-client/issues/11034 +test.failing( + "incrementally rerenders data returned by a `fetchMore` for a streamed query", + async () => { + let subject!: Subject; + const query = gql` + query ($offset: Int) { + friendList(offset: $offset) @stream(initialCount: 1) { + id + name + } + } + `; + + const cache = new InMemoryCache({ + typePolicies: { + Query: { + fields: { + greetings: offsetLimitPagination(), + }, + }, + }, + }); + + const client = new ApolloClient({ + link: createLink({ + friendList: () => { + const iterator = asyncIterableSubject(); + subject = iterator.subject; + + return iterator.stream; + }, + }), + cache, + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender, getCurrentSnapshot } = await renderSuspenseHook( + () => useSuspenseQuery(query, { variables: { offset: 0 } }), + { wrapper: createClientWrapper(client) } + ); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + subject.next(friends[0]); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + subject.next(friends[1]); + subject.complete(); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + ], + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + const fetchMorePromise = getCurrentSnapshot().fetchMore({ + variables: { offset: 2 }, + }); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + subject.next(friends[2]); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + await wait(0); + subject.next({ id: 4, name: "Chewbacca" }); + subject.complete(); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + { __typename: "Friend", id: "4", name: "Chewbacca" }, + ], + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + 
}); + } + + await expect(fetchMorePromise).resolves.toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "3", name: "Leia" }, + { __typename: "Friend", id: "4", name: "Chewbacca" }, + ], + }, + }); + + await expect(takeRender).not.toRerender(); + } +); + +test("throws network errors returned by streamed queries", async () => { + using _consoleSpy = spyOnConsole("error"); + + const query = gql` + query { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: new ApolloLink(() => { + return throwError(() => new Error("Could not fetch")).pipe(delay(20)); + }), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + () => useSuspenseQuery(query), + { wrapper: createClientWrapper(client) } + ); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["ErrorBoundary"]); + expect(snapshot).toStrictEqualTyped({ + error: new Error("Could not fetch"), + }); + } + + await expect(takeRender).not.toRerender(); +}); + +test("throws graphql errors returned by streamed queries", async () => { + using _consoleSpy = spyOnConsole("error"); + + const query = gql` + query { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: createLink({ + friendList: async () => { + await wait(20); + throw new Error("Could not get friend list"); + }, + }), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + () => useSuspenseQuery(query), + { wrapper: createClientWrapper(client) } + ); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["ErrorBoundary"]); + expect(snapshot).toStrictEqualTyped({ + error: new CombinedGraphQLErrors({ + data: { friendList: null }, + errors: [ + { message: "Could not get friend list", path: ["friendList"] }, + ], + }), + }); + } + + await expect(takeRender).not.toRerender(); +}); + +test("discards partial data and throws errors returned in incremental chunks", async () => { + const { stream, subject } = asyncIterableSubject(); + using _consoleSpy = spyOnConsole("error"); + + const query = gql` + query { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: createLink({ + friendList: async function* () { + for await (const friend of stream) { + if (friend.id === 2) { + throw new Error("Could not get friend"); + } + + yield friend; + } + }, + }), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + () => useSuspenseQuery(query), + { wrapper: createClientWrapper(client) } + ); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + subject.next(friends[0]); + + { + const { snapshot, renderedComponents } = await takeRender(); + + 
expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + subject.next(friends[1]); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["ErrorBoundary"]); + expect(snapshot).toStrictEqualTyped({ + error: new CombinedGraphQLErrors({ + data: { + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }, + errors: [{ message: "Could not get friend", path: ["friendList"] }], + }), + }); + } + + await expect(takeRender).not.toRerender(); +}); + +test.failing( + "adds partial data and does not throw errors returned in incremental chunks but returns them in `error` property with errorPolicy set to `all`", + async () => { + const query = gql` + query { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: createLink({ + friendList: () => { + return friends.map((f, i) => { + if (i === 1) { + return preventUnhandledRejection( + Promise.reject(new Error("Could not get friend")) + ); + } + + return { + id: f.id, + name: wait(i * 50).then(() => f.name), + }; + }); + }, + }), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + () => useSuspenseQuery(query, { errorPolicy: "all" }), + { wrapper: createClientWrapper(client) } + ); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }, null], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: new CombinedGraphQLErrors({ + data: { + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }, null], + }, + errors: [ + { message: "Could not get friend", path: ["friendList", 1] }, + ], + }), + }); + } + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + null, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: new CombinedGraphQLErrors({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + null, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + errors: [ + { message: "Could not get friend", path: ["friendList", 1] }, + ], + }), + }); + } + + await expect(takeRender).not.toRerender(); + } +); + +test.failing( + "adds partial data and discards errors returned in incremental chunks with errorPolicy set to `ignore`", + async () => { + const query = gql` + query { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: createLink({ + friendList: () => { + return friends.map((f, i) => { + if (i === 1) { + return preventUnhandledRejection( + Promise.reject(new Error("Could not get friend")) + ); 
+ } + + return { + id: f.id, + name: wait(i * 50).then(() => f.name), + }; + }); + }, + }), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + () => useSuspenseQuery(query, { errorPolicy: "ignore" }), + { wrapper: createClientWrapper(client) } + ); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }, null], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + null, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + await expect(takeRender).not.toRerender(); + } +); + +test.failing( + "can refetch and respond to cache updates after encountering an error in an incremental chunk for a streamed query when `errorPolicy` is `all`", + async () => { + let returnError = true; + const query = gql` + query { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: createLink({ + friendList: () => { + return friends.map((f, i) => { + if (i === 1 && returnError) { + return preventUnhandledRejection( + Promise.reject(new Error("Could not get friend")) + ); + } + + return { + id: f.id, + name: wait(i * 50).then(() => f.name), + }; + }); + }, + }), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender, getCurrentSnapshot } = await renderSuspenseHook( + () => useSuspenseQuery(query, { errorPolicy: "all" }), + { wrapper: createClientWrapper(client) } + ); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }, null], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: new CombinedGraphQLErrors({ + data: { + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }, null], + }, + errors: [ + { message: "Could not get friend", path: ["friendList", 1] }, + ], + }), + }); + } + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + null, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }), + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: new CombinedGraphQLErrors({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + null, + { __typename: "Friend", id: "3", name: 
"Leia" }, + ], + }, + errors: [ + { message: "Could not get friend", path: ["friendList", 1] }, + ], + }), + }); + } + + returnError = false; + const refetchPromise = getCurrentSnapshot().refetch(); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + ], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + await expect(refetchPromise).resolves.toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + }); + + client.cache.updateQuery({ query }, (data) => ({ + friendList: [ + { ...data.friendList[0], name: "Luke (updated)" }, + ...data.friendList.slice(1), + ], + })); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke (updated)" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + await expect(takeRender).not.toRerender(); + } +); From 02bde88110101aff0567c571d3df65535b91c6f9 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Wed, 10 Sep 2025 19:21:35 -0600 Subject: [PATCH 159/254] Fix eslint issue --- .../handlers/__tests__/graphql17Alpha9/stream.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts b/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts index 9f2b40356ef..5170ce2d25c 100644 --- a/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts +++ b/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts @@ -10,10 +10,10 @@ import { } from "graphql-17-alpha9"; import { from } from "rxjs"; +import type { DocumentNode } from "@apollo/client"; import { ApolloClient, ApolloLink, - DocumentNode, gql, InMemoryCache, NetworkStatus, From ef6f5b821854497add5439ba79250a7f1b13873d Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Wed, 10 Sep 2025 21:22:49 -0600 Subject: [PATCH 160/254] WIP cache stream update --- 
.../__tests__/graphql17Alpha9/stream.test.ts | 214 ++++++++++++++++++ .../streamGraphQL17Alpha9.test.tsx | 24 +- 2 files changed, 226 insertions(+), 12 deletions(-) diff --git a/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts b/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts index 5170ce2d25c..a64b6086e31 100644 --- a/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts +++ b/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts @@ -2409,3 +2409,217 @@ test("GraphQL17Alpha9Handler can be used with `ApolloClient`", async () => { partial: false, }); }); + +test("properly merges streamed data into cache data", async () => { + const query = gql` + query { + friendList @stream(initialCount: 2) { + name + id + } + } + `; + + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + friendList: () => friends.map((f) => Promise.resolve(f)), + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect( + request.handle( + { + friendList: [ + { name: "Luke Cached", id: "1" }, + { name: "Han Cached", id: "2" }, + { name: "Leia Cached", id: "3" }, + ], + }, + chunk + ) + ).toStrictEqualTyped({ + data: { + friendList: [ + { name: "Luke", id: "1" }, + { name: "Han", id: "2" }, + { name: "Leia Cached", id: "3" }, + ], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect( + request.handle( + { + friendList: [ + { name: "Luke", id: "1" }, + { name: "Han", id: "2" }, + { name: "Leia Cached", id: "3" }, + ], + }, + chunk + ) + ).toStrictEqualTyped({ + data: { + friendList: [ + { name: "Luke", id: "1" }, + { name: "Han", id: "2" }, + { name: "Leia", id: "3" }, + ], + }, + }); + expect(request.hasNext).toBe(false); + } +}); + +test("properly merges streamed data into partial cache data", async () => { + const query = gql` + query { + friendList @stream(initialCount: 2) { + name + id + } + } + `; + + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + friendList: () => friends.map((f) => Promise.resolve(f)), + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect( + request.handle( + { + friendList: [{ id: "1" }, { id: "2" }, { id: "3" }], + }, + chunk + ) + ).toStrictEqualTyped({ + data: { + friendList: [ + { name: "Luke", id: "1" }, + { name: "Han", id: "2" }, + { id: "3" }, + ], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect( + request.handle( + { + friendList: [ + { name: "Luke", id: "1" }, + { name: "Han", id: "2" }, + { id: "3" }, + ], + }, + chunk + ) + ).toStrictEqualTyped({ + data: { + friendList: [ + { name: "Luke", id: "1" }, + { name: "Han", id: "2" }, + { name: "Leia", id: "3" }, + ], + }, + }); + expect(request.hasNext).toBe(false); + } +}); + +test("properly merges streamed data into list with fewer items", async () => { + const query = gql` + query { + friendList @stream(initialCount: 2) { + name + id + } + } + `; + + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { 
+ friendList: () => friends.map((f) => Promise.resolve(f)), + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect( + request.handle( + { + friendList: [{ id: "1" }, { id: "2" }, { id: "3" }], + }, + chunk + ) + ).toStrictEqualTyped({ + data: { + friendList: [ + { name: "Luke", id: "1" }, + { name: "Han", id: "2" }, + { id: "3" }, + ], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect( + request.handle( + { + friendList: [ + { name: "Luke", id: "1" }, + { name: "Han", id: "2" }, + { id: "3" }, + ], + }, + chunk + ) + ).toStrictEqualTyped({ + data: { + friendList: [ + { name: "Luke", id: "1" }, + { name: "Han", id: "2" }, + { name: "Leia", id: "3" }, + ], + }, + }); + expect(request.hasNext).toBe(false); + } +}); diff --git a/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx b/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx index 39e93447e45..2102aee2261 100644 --- a/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx +++ b/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx @@ -369,7 +369,7 @@ test('does not suspend streamed queries with data in the cache and using a "cach await expect(takeRender).not.toRerender(); }); -test.failing( +test.skip.failing( 'does not suspend streamed queries with partial data in the cache and using a "cache-first" fetch policy with `returnPartialData`', async () => { const { subject, stream } = asyncIterableSubject(); @@ -500,7 +500,7 @@ test.failing( } ); -test.failing( +test.skip.failing( 'does not suspend streamed queries with data in the cache and using a "cache-and-network" fetch policy', async () => { const { stream, subject } = asyncIterableSubject(); @@ -600,7 +600,7 @@ test.failing( } ); -test.failing( +test.skip.failing( "incrementally rerenders data returned by a `refetch` for a streamed query", async () => { let subject!: Subject; @@ -738,7 +738,7 @@ test.failing( } ); -test("incrementally renders data returned after skipping a streamed query", async () => { +test.skip("incrementally renders data returned after skipping a streamed query", async () => { const { stream, subject } = asyncIterableSubject(); const query = gql` query { @@ -836,7 +836,7 @@ test("incrementally renders data returned after skipping a streamed query", asyn // the core bug is fixed, this test can be removed in favor of the other test. // // https://github.com/apollographql/apollo-client/issues/11034 -test.failing( +test.skip.failing( "rerenders data returned by `fetchMore` for a streamed query", async () => { let subject!: Subject; @@ -992,7 +992,7 @@ test.failing( // is fixed. 
// // https://github.com/apollographql/apollo-client/issues/11034 -test.failing( +test.skip.failing( "incrementally rerenders data returned by a `fetchMore` for a streamed query", async () => { let subject!: Subject; @@ -1142,7 +1142,7 @@ test.failing( } ); -test("throws network errors returned by streamed queries", async () => { +test.skip("throws network errors returned by streamed queries", async () => { using _consoleSpy = spyOnConsole("error"); const query = gql` @@ -1186,7 +1186,7 @@ test("throws network errors returned by streamed queries", async () => { await expect(takeRender).not.toRerender(); }); -test("throws graphql errors returned by streamed queries", async () => { +test.skip("throws graphql errors returned by streamed queries", async () => { using _consoleSpy = spyOnConsole("error"); const query = gql` @@ -1238,7 +1238,7 @@ test("throws graphql errors returned by streamed queries", async () => { await expect(takeRender).not.toRerender(); }); -test("discards partial data and throws errors returned in incremental chunks", async () => { +test.skip("discards partial data and throws errors returned in incremental chunks", async () => { const { stream, subject } = asyncIterableSubject(); using _consoleSpy = spyOnConsole("error"); @@ -1314,7 +1314,7 @@ test("discards partial data and throws errors returned in incremental chunks", a await expect(takeRender).not.toRerender(); }); -test.failing( +test.skip.failing( "adds partial data and does not throw errors returned in incremental chunks but returns them in `error` property with errorPolicy set to `all`", async () => { const query = gql` @@ -1413,7 +1413,7 @@ test.failing( } ); -test.failing( +test.skip.failing( "adds partial data and discards errors returned in incremental chunks with errorPolicy set to `ignore`", async () => { const query = gql` @@ -1494,7 +1494,7 @@ test.failing( } ); -test.failing( +test.skip.failing( "can refetch and respond to cache updates after encountering an error in an incremental chunk for a streamed query when `errorPolicy` is `all`", async () => { let returnError = true; From 208ddd0317be6e7fb5e53e54b7e35b3c00a0304b Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Wed, 10 Sep 2025 22:27:12 -0600 Subject: [PATCH 161/254] Don't set this.data to cacheData and instead merge at the end --- src/incremental/handlers/graphql17Alpha9.ts | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/src/incremental/handlers/graphql17Alpha9.ts b/src/incremental/handlers/graphql17Alpha9.ts index 58ba0b79cae..a9fac8090ed 100644 --- a/src/incremental/handlers/graphql17Alpha9.ts +++ b/src/incremental/handlers/graphql17Alpha9.ts @@ -82,6 +82,13 @@ class IncrementalRequest { hasNext = true; + // `this.data` represents the merged results of all raw chunk data without + // cache data mixed in. This makes it easier to track incremental @stream + // chunks since they can be concatenated with the results streamed directly + // from the server, rather than concatenated with a cache list that might + // already have a non-zero length. Cache data is deep merged with this.data at + // the end to ensure this.data overwrites array indexes from increemntal + // chunks at the right location. 
private data: any = {}; private errors: GraphQLFormattedError[] = []; private extensions: Record = {}; @@ -92,7 +99,10 @@ class IncrementalRequest chunk: GraphQL17Alpha9Handler.Chunk ): FormattedExecutionResult { this.hasNext = chunk.hasNext; - this.data = cacheData; + + if ("data" in chunk) { + this.data = chunk.data; + } if (chunk.pending) { this.pending.push(...chunk.pending); @@ -146,7 +156,9 @@ class IncrementalRequest } } - const result: FormattedExecutionResult = { data: this.data }; + const result: FormattedExecutionResult = { + data: new DeepMerger().merge(cacheData, this.data), + }; if (isNonEmptyArray(this.errors)) { result.errors = this.errors; From 6e72cfa1e1835e535b47ee383ea2ba5e8a944d73 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Wed, 10 Sep 2025 22:29:24 -0600 Subject: [PATCH 162/254] Extract helper to deep merge --- src/incremental/handlers/graphql17Alpha9.ts | 28 ++++++++++----------- 1 file changed, 13 insertions(+), 15 deletions(-) diff --git a/src/incremental/handlers/graphql17Alpha9.ts b/src/incremental/handlers/graphql17Alpha9.ts index a9fac8090ed..21545801e93 100644 --- a/src/incremental/handlers/graphql17Alpha9.ts +++ b/src/incremental/handlers/graphql17Alpha9.ts @@ -108,7 +108,7 @@ class IncrementalRequest this.pending.push(...chunk.pending); } - this.merge(chunk, new DeepMerger()); + this.merge(chunk); if (hasIncrementalChunks(chunk)) { for (const incremental of chunk.incremental) { @@ -135,14 +135,11 @@ class IncrementalRequest data = parent as typeof data; } - this.merge( - { - data, - extensions: incremental.extensions, - errors: incremental.errors, - }, - new DeepMerger() - ); + this.merge({ + data, + extensions: incremental.extensions, + errors: incremental.errors, + }); } } @@ -157,7 +154,7 @@ class IncrementalRequest } const result: FormattedExecutionResult = { - data: new DeepMerger().merge(cacheData, this.data), + data: deepMerge(cacheData, this.data), }; if (isNonEmptyArray(this.errors)) { @@ -171,12 +168,9 @@ class IncrementalRequest return result; } - private merge( - normalized: FormattedExecutionResult, - merger: DeepMerger - ) { + private merge(normalized: FormattedExecutionResult) { if (normalized.data !== undefined) { - this.data = merger.merge(this.data, normalized.data); + this.data = deepMerge(this.data, normalized.data); } if (normalized.errors) { @@ -187,6 +181,10 @@ class IncrementalRequest } } +function deepMerge(target: T, source: T): T { + return new DeepMerger().merge(target, source); +} + /** * Provides handling for the incremental delivery specification implemented by * graphql.js version `17.0.0-alpha.9`. 
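
To illustrate the behavior the two patches above aim for, the following is a
minimal sketch (not Apollo Client source) of a DeepMerger-style merge that
treats arrays as objects keyed by index. The helper name `deepMergeSketch` and
the sample data are illustrative only; the expected output mirrors the
"properly merges streamed data into cache data" test added earlier in this
series.

// Index/key-wise deep merge where `source` wins for every key it defines.
// This is an assumption about how `DeepMerger` behaves, inferred from the
// stream tests in this series, not a copy of its implementation.
function deepMergeSketch(target: unknown, source: unknown): unknown {
  const isPlain = (v: unknown): v is Record<string, unknown> =>
    v !== null && typeof v === "object";

  if (!isPlain(target) || !isPlain(source)) {
    return source;
  }

  const merged: any = Array.isArray(target) ? [...target] : { ...target };

  for (const key of Object.keys(source)) {
    merged[key] = deepMergeSketch((target as any)[key], (source as any)[key]);
  }

  return merged;
}

// `streamed` holds only the chunks received so far (indexes 0 and 1), so
// merging it into longer cache data overwrites those indexes and leaves the
// trailing cached item untouched.
const cacheData = {
  friendList: [
    { id: "1", name: "Luke Cached" },
    { id: "2", name: "Han Cached" },
    { id: "3", name: "Leia Cached" },
  ],
};

const streamed = {
  friendList: [
    { id: "1", name: "Luke" },
    { id: "2", name: "Han" },
  ],
};

console.log(deepMergeSketch(cacheData, streamed));
// -> friendList[0] and [1] come from the stream, while [2] stays
//    "Leia Cached", matching the expectations in the stream tests above.
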
From 9961449fc46ea6e12213fd7c8c55e530f1de5fbf Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Wed, 10 Sep 2025 22:35:28 -0600 Subject: [PATCH 163/254] Add additional test cases --- .../__tests__/graphql17Alpha9/stream.test.ts | 82 ++++++++++++++++++- 1 file changed, 79 insertions(+), 3 deletions(-) diff --git a/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts b/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts index a64b6086e31..e3ae22def74 100644 --- a/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts +++ b/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts @@ -2579,7 +2579,7 @@ test("properly merges streamed data into list with fewer items", async () => { expect( request.handle( { - friendList: [{ id: "1" }, { id: "2" }, { id: "3" }], + friendList: [{ id: "1", name: "Luke Cached" }], }, chunk ) @@ -2588,7 +2588,6 @@ test("properly merges streamed data into list with fewer items", async () => { friendList: [ { name: "Luke", id: "1" }, { name: "Han", id: "2" }, - { id: "3" }, ], }, }); @@ -2606,7 +2605,6 @@ test("properly merges streamed data into list with fewer items", async () => { friendList: [ { name: "Luke", id: "1" }, { name: "Han", id: "2" }, - { id: "3" }, ], }, chunk @@ -2623,3 +2621,81 @@ test("properly merges streamed data into list with fewer items", async () => { expect(request.hasNext).toBe(false); } }); + +test("properly merges streamed data into list with more items", async () => { + const query = gql` + query { + friendList @stream(initialCount: 2) { + name + id + } + } + `; + + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + friendList: () => friends.map((f) => Promise.resolve(f)), + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect( + request.handle( + { + friendList: [ + { name: "Luke Cached", id: "1" }, + { name: "Han Cached", id: "2" }, + { name: "Leia Cached", id: "3" }, + { name: "Chewbacca Cached", id: "4" }, + ], + }, + chunk + ) + ).toStrictEqualTyped({ + data: { + friendList: [ + { name: "Luke", id: "1" }, + { name: "Han", id: "2" }, + { name: "Leia Cached", id: "3" }, + { name: "Chewbacca Cached", id: "4" }, + ], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect( + request.handle( + { + friendList: [ + { name: "Luke", id: "1" }, + { name: "Han", id: "2" }, + { name: "Leia Cached", id: "3" }, + { name: "Chewbacca Cached", id: "4" }, + ], + }, + chunk + ) + ).toStrictEqualTyped({ + data: { + friendList: [ + { name: "Luke", id: "1" }, + { name: "Han", id: "2" }, + { name: "Leia", id: "3" }, + { name: "Chewbacca Cached", id: "4" }, + ], + }, + }); + expect(request.hasNext).toBe(false); + } +}); From b87639a6b31fb45c5dbf7498e0d556c9b2b14d57 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Wed, 10 Sep 2025 22:49:25 -0600 Subject: [PATCH 164/254] Re-enable most tests --- .../streamGraphQL17Alpha9.test.tsx | 611 +++++++++--------- 1 file changed, 304 insertions(+), 307 deletions(-) diff --git a/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx b/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx index 2102aee2261..f56beae502f 100644 --- a/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx +++ 
b/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx @@ -369,158 +369,179 @@ test('does not suspend streamed queries with data in the cache and using a "cach await expect(takeRender).not.toRerender(); }); -test.skip.failing( - 'does not suspend streamed queries with partial data in the cache and using a "cache-first" fetch policy with `returnPartialData`', - async () => { - const { subject, stream } = asyncIterableSubject(); +// TODO: Determine how we handle partial data with streamed responses. While this +// works as expected and renders correctly, this also emits missing field +// warnings in the console when writing the result to the cache since array items +// with partial cache data are still included for items that haven't streamed in +// yet. +test('does not suspend streamed queries with partial data in the cache and using a "cache-first" fetch policy with `returnPartialData`', async () => { + using _TODO_REMOVE_ME_AFTER_DECIDING_COMMENT = spyOnConsole("error"); + const { subject, stream } = asyncIterableSubject(); - const query = gql` - query { - friendList @stream(initialCount: 1) { - id - name - } + const query = gql` + query { + friendList @stream(initialCount: 1) { + id + name } - `; - - const cache = new InMemoryCache(); - - // We are intentionally writing partial data to the cache. Supress console - // warnings to avoid unnecessary noise in the test. - { - using _consoleSpy = spyOnConsole("error"); - cache.writeQuery({ - query, - data: { - friendList: friends.map((friend) => ({ - __typename: "Friend", - id: String(friend.id), - })), - }, - }); } + `; - const client = new ApolloClient({ - cache, - link: createLink({ friendList: () => stream }), - incrementalHandler: new GraphQL17Alpha9Handler(), + const cache = new InMemoryCache(); + + // We are intentionally writing partial data to the cache. Supress console + // warnings to avoid unnecessary noise in the test. 
+ { + // using _consoleSpy = spyOnConsole("error"); + cache.writeQuery({ + query, + data: { + friendList: friends.map((friend) => ({ + __typename: "Friend", + id: String(friend.id), + })), + }, }); + } - using _disabledAct = disableActEnvironment(); - const { takeRender } = await renderSuspenseHook( - () => - useSuspenseQuery(query, { - fetchPolicy: "cache-first", - returnPartialData: true, - }), - { - wrapper: createClientWrapper(client), - } - ); + const client = new ApolloClient({ + cache, + link: createLink({ friendList: () => stream }), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + () => + useSuspenseQuery(query, { + fetchPolicy: "cache-first", + returnPartialData: true, + }), { - const { snapshot, renderedComponents } = await takeRender(); - - expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); - expect(snapshot).toStrictEqualTyped({ - data: { - friendList: friends.map((friend) => ({ - __typename: "Friend", - id: String(friend.id), - })), - }, - dataState: "partial", - networkStatus: NetworkStatus.loading, - error: undefined, - }); + wrapper: createClientWrapper(client), } + ); - subject.next(friends[0]); + { + const { snapshot, renderedComponents } = await takeRender(); - { - const { snapshot, renderedComponents } = await takeRender(); + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: friends.map((friend) => ({ + __typename: "Friend", + id: String(friend.id), + })), + }, + dataState: "partial", + networkStatus: NetworkStatus.loading, + error: undefined, + }); + } - expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); - expect(snapshot).toStrictEqualTyped({ - data: markAsStreaming({ - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - { __typename: "Friend", id: "2" }, - { __typename: "Friend", id: "3" }, - ], - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: undefined, - }); - } + subject.next(friends[0]); - subject.next(friends[0]); + { + const { snapshot, renderedComponents } = await takeRender(); - { - const { snapshot, renderedComponents } = await takeRender(); + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2" }, + { __typename: "Friend", id: "3" }, + ], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } - expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); - expect(snapshot).toStrictEqualTyped({ - data: markAsStreaming({ - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3" }, - ], - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: undefined, - }); - } + subject.next(friends[1]); - subject.next(friends[1]); - subject.complete(); + { + const { snapshot, renderedComponents } = await takeRender(); - { - const { snapshot, renderedComponents } = await takeRender(); + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3" }, + ], + 
}), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } - expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); - expect(snapshot).toStrictEqualTyped({ - data: markAsStreaming({ - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3", name: "Leia" }, - ], - }), - dataState: "complete", - networkStatus: NetworkStatus.ready, - error: undefined, - }); - } + subject.next(friends[2]); + subject.complete(); - await expect(takeRender).not.toRerender(); + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }), + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); } -); -test.skip.failing( - 'does not suspend streamed queries with data in the cache and using a "cache-and-network" fetch policy', - async () => { - const { stream, subject } = asyncIterableSubject(); - const query = gql` - query { - friendList @stream(initialCount: 1) { - id - name - } + await expect(takeRender).not.toRerender(); +}); + +test('does not suspend streamed queries with data in the cache and using a "cache-and-network" fetch policy', async () => { + const { stream, subject } = asyncIterableSubject(); + const query = gql` + query { + friendList @stream(initialCount: 1) { + id + name } - `; + } + `; - const client = new ApolloClient({ - cache: new InMemoryCache(), - link: createLink({ friendList: () => stream }), - incrementalHandler: new GraphQL17Alpha9Handler(), - }); + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: createLink({ friendList: () => stream }), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); - client.writeQuery({ - query, + client.writeQuery({ + query, + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Cached Luke" }, + { __typename: "Friend", id: "2", name: "Cached Han" }, + { __typename: "Friend", id: "3", name: "Cached Leia" }, + ], + }, + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + () => useSuspenseQuery(query, { fetchPolicy: "cache-and-network" }), + { wrapper: createClientWrapper(client) } + ); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ data: { friendList: [ { __typename: "Friend", id: "1", name: "Cached Luke" }, @@ -528,205 +549,168 @@ test.skip.failing( { __typename: "Friend", id: "3", name: "Cached Leia" }, ], }, + dataState: "complete", + networkStatus: NetworkStatus.loading, + error: undefined, }); + } - using _disabledAct = disableActEnvironment(); - const { takeRender } = await renderSuspenseHook( - () => useSuspenseQuery(query, { fetchPolicy: "cache-and-network" }), - { wrapper: createClientWrapper(client) } - ); - - { - const { snapshot, renderedComponents } = await takeRender(); - - expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); - expect(snapshot).toStrictEqualTyped({ - data: { - friendList: [ - { __typename: "Friend", id: "1", name: "Cached Luke" }, - { __typename: "Friend", id: "2", name: "Cached Han" }, - { __typename: "Friend", id: 
"3", name: "Cached Leia" }, - ], - }, - dataState: "complete", - networkStatus: NetworkStatus.loading, - error: undefined, - }); - } - - subject.next(friends[0]); - - { - const { snapshot, renderedComponents } = await takeRender(); + subject.next(friends[0]); - expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); - expect(snapshot).toStrictEqualTyped({ - data: markAsStreaming({ - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - { __typename: "Friend", id: "2", name: "Cached Han" }, - { __typename: "Friend", id: "3", name: "Cached Leia" }, - ], - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: undefined, - }); - } + { + const { snapshot, renderedComponents } = await takeRender(); - subject.next(friends[1]); - subject.next(friends[2]); - subject.complete(); + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Cached Han" }, + { __typename: "Friend", id: "3", name: "Cached Leia" }, + ], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } - { - const { snapshot, renderedComponents } = await takeRender(); + subject.next(friends[1]); + subject.next(friends[2]); + subject.complete(); - expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); - expect(snapshot).toStrictEqualTyped({ - data: { - friends: [ - { __typename: "Friend", id: "1", name: "Luke" }, - { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3", name: "Leia" }, - ], - }, - dataState: "complete", - networkStatus: NetworkStatus.ready, - error: undefined, - }); - } + { + const { snapshot, renderedComponents } = await takeRender(); - await expect(takeRender).not.toRerender(); + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); } -); -test.skip.failing( - "incrementally rerenders data returned by a `refetch` for a streamed query", - async () => { - let subject!: Subject; - const query = gql` - query { - friendList @stream(initialCount: 1) { - id - name - } - } - `; + await expect(takeRender).not.toRerender(); +}); - const client = new ApolloClient({ - link: createLink({ - friendList: () => { - const result = asyncIterableSubject(); - subject = result.subject; +test("incrementally rerenders data returned by a `refetch` for a streamed query", async () => { + let subject!: Subject; + const query = gql` + query { + friendList @stream(initialCount: 1) { + id + name + } + } + `; - return result.stream; - }, - }), - cache: new InMemoryCache(), - incrementalHandler: new GraphQL17Alpha9Handler(), - }); + const client = new ApolloClient({ + link: createLink({ + friendList: () => { + const iterable = asyncIterableSubject(); + subject = iterable.subject; - using _disabledAct = disableActEnvironment(); - const { takeRender, getCurrentSnapshot } = await renderSuspenseHook( - () => useSuspenseQuery(query), - { wrapper: createClientWrapper(client) } - ); + return iterable.stream; + }, + }), + cache: new InMemoryCache(), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); - { - const { 
renderedComponents } = await takeRender(); + using _disabledAct = disableActEnvironment(); + const { takeRender, getCurrentSnapshot } = await renderSuspenseHook( + () => useSuspenseQuery(query), + { wrapper: createClientWrapper(client) } + ); - expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); - } + { + const { renderedComponents } = await takeRender(); - subject.next(friends[0]); + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } - { - const { snapshot, renderedComponents } = await takeRender(); + subject.next(friends[0]); - expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); - expect(snapshot).toStrictEqualTyped({ - data: markAsStreaming({ - friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: undefined, - }); - } + { + const { snapshot, renderedComponents } = await takeRender(); - subject.next(friends[1]); - subject.next(friends[2]); - subject.complete(); + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } - { - const { snapshot, renderedComponents } = await takeRender(); + subject.next(friends[1]); + subject.next(friends[2]); + subject.complete(); - expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); - expect(snapshot).toStrictEqualTyped({ - data: { - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3", name: "Leia" }, - ], - }, - dataState: "complete", - networkStatus: NetworkStatus.ready, - error: undefined, - }); - } + { + const { snapshot, renderedComponents } = await takeRender(); - const refetchPromise = getCurrentSnapshot().refetch(); + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } - { - const { renderedComponents } = await takeRender(); + const refetchPromise = getCurrentSnapshot().refetch(); - expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); - } + { + const { renderedComponents } = await takeRender(); - subject.next({ id: 1, name: "Luke (refetch)" }); + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } - { - const { snapshot, renderedComponents } = await takeRender(); + subject.next({ id: 1, name: "Luke (refetch)" }); - expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); - expect(snapshot).toStrictEqualTyped({ - data: markAsStreaming({ - friendList: [ - { __typename: "Friend", id: "1", name: "Luke (refetch)" }, - { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3", name: "Leia" }, - ], - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: undefined, - }); - } + { + const { snapshot, renderedComponents } = await takeRender(); - subject.next({ id: 2, name: "Han (refetch)" }); - subject.next({ id: 3, name: "Leia (refetch)" }); - subject.complete(); + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + 
data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke (refetch)" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } - { - const { snapshot, renderedComponents } = await takeRender(); + subject.next({ id: 2, name: "Han (refetch)" }); + subject.next({ id: 3, name: "Leia (refetch)" }); + subject.complete(); - expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); - expect(snapshot).toStrictEqualTyped({ - data: { - friendList: [ - { __typename: "Friend", id: "1", name: "Luke (refetch)" }, - { __typename: "Friend", id: "2", name: "Han (refetch)" }, - { __typename: "Friend", id: "3", name: "Leia (refetch)" }, - ], - }, - dataState: "complete", - networkStatus: NetworkStatus.ready, - error: undefined, - }); - } + { + const { snapshot, renderedComponents } = await takeRender(); - await expect(refetchPromise).resolves.toStrictEqualTyped({ + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ data: { friendList: [ { __typename: "Friend", id: "1", name: "Luke (refetch)" }, @@ -734,11 +718,24 @@ test.skip.failing( { __typename: "Friend", id: "3", name: "Leia (refetch)" }, ], }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, }); } -); -test.skip("incrementally renders data returned after skipping a streamed query", async () => { + await expect(refetchPromise).resolves.toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke (refetch)" }, + { __typename: "Friend", id: "2", name: "Han (refetch)" }, + { __typename: "Friend", id: "3", name: "Leia (refetch)" }, + ], + }, + }); +}); + +test("incrementally renders data returned after skipping a streamed query", async () => { const { stream, subject } = asyncIterableSubject(); const query = gql` query { @@ -836,7 +833,7 @@ test.skip("incrementally renders data returned after skipping a streamed query", // the core bug is fixed, this test can be removed in favor of the other test. // // https://github.com/apollographql/apollo-client/issues/11034 -test.skip.failing( +test.failing( "rerenders data returned by `fetchMore` for a streamed query", async () => { let subject!: Subject; @@ -992,7 +989,7 @@ test.skip.failing( // is fixed. 
// // https://github.com/apollographql/apollo-client/issues/11034 -test.skip.failing( +test.failing( "incrementally rerenders data returned by a `fetchMore` for a streamed query", async () => { let subject!: Subject; @@ -1142,7 +1139,7 @@ test.skip.failing( } ); -test.skip("throws network errors returned by streamed queries", async () => { +test("throws network errors returned by streamed queries", async () => { using _consoleSpy = spyOnConsole("error"); const query = gql` @@ -1186,7 +1183,7 @@ test.skip("throws network errors returned by streamed queries", async () => { await expect(takeRender).not.toRerender(); }); -test.skip("throws graphql errors returned by streamed queries", async () => { +test("throws graphql errors returned by streamed queries", async () => { using _consoleSpy = spyOnConsole("error"); const query = gql` @@ -1238,7 +1235,7 @@ test.skip("throws graphql errors returned by streamed queries", async () => { await expect(takeRender).not.toRerender(); }); -test.skip("discards partial data and throws errors returned in incremental chunks", async () => { +test("discards partial data and throws errors returned in incremental chunks", async () => { const { stream, subject } = asyncIterableSubject(); using _consoleSpy = spyOnConsole("error"); @@ -1314,7 +1311,7 @@ test.skip("discards partial data and throws errors returned in incremental chunk await expect(takeRender).not.toRerender(); }); -test.skip.failing( +test.failing( "adds partial data and does not throw errors returned in incremental chunks but returns them in `error` property with errorPolicy set to `all`", async () => { const query = gql` @@ -1413,7 +1410,7 @@ test.skip.failing( } ); -test.skip.failing( +test.failing( "adds partial data and discards errors returned in incremental chunks with errorPolicy set to `ignore`", async () => { const query = gql` @@ -1494,7 +1491,7 @@ test.skip.failing( } ); -test.skip.failing( +test.failing( "can refetch and respond to cache updates after encountering an error in an incremental chunk for a streamed query when `errorPolicy` is `all`", async () => { let returnError = true; From 1addfc101f34bfa3f519ec23284ea06913c9f0ea Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Wed, 10 Sep 2025 23:06:10 -0600 Subject: [PATCH 165/254] Maintain a queue of the last delivery in case there are no listeners --- src/react/internal/cache/QueryReference.ts | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/src/react/internal/cache/QueryReference.ts b/src/react/internal/cache/QueryReference.ts index cef2a7f6ec2..e02c76303f9 100644 --- a/src/react/internal/cache/QueryReference.ts +++ b/src/react/internal/cache/QueryReference.ts @@ -186,6 +186,7 @@ export class InternalQueryReference< public promise!: QueryRefPromise; + private queue: QueryRefPromise | undefined; private subscription!: Subscription; private listeners = new Set>(); private autoDisposeTimeoutId?: NodeJS.Timeout; @@ -335,6 +336,11 @@ export class InternalQueryReference< listen(listener: Listener) { this.listeners.add(listener); + if (this.queue) { + this.deliver(this.queue); + this.queue = undefined; + } + return () => { this.listeners.delete(listener); }; @@ -412,6 +418,18 @@ export class InternalQueryReference< } private deliver(promise: QueryRefPromise) { + // Maintain a queue of the last item we tried to deliver so that we can + // deliver it as soon as we get the first listener. 
This helps in cases such + // as `@stream` where React may render a component and incremental results + // are loaded in between when the component renders and effects are run. If + // effects are run after the incremntal chunks are delivered, we'll have + // rendered a stale value. The queue ensures we can deliver the most + // up-to-date value as soon as the component is ready to listen for new + // values. + if (this.listeners.size === 0) { + this.queue = promise; + } + this.listeners.forEach((listener) => listener(promise)); } From 172da1cd192187369f2a6a08c179e7a3d19ec0a2 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Wed, 10 Sep 2025 23:07:41 -0600 Subject: [PATCH 166/254] Enable some failing tests --- .../streamGraphQL17Alpha9.test.tsx | 286 +++++++++--------- 1 file changed, 138 insertions(+), 148 deletions(-) diff --git a/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx b/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx index f56beae502f..7a4911482a7 100644 --- a/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx +++ b/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx @@ -1311,77 +1311,83 @@ test("discards partial data and throws errors returned in incremental chunks", a await expect(takeRender).not.toRerender(); }); -test.failing( - "adds partial data and does not throw errors returned in incremental chunks but returns them in `error` property with errorPolicy set to `all`", - async () => { - const query = gql` - query { - friendList @stream(initialCount: 1) { - id - name - } +test("adds partial data and does not throw errors returned in incremental chunks but returns them in `error` property with errorPolicy set to `all`", async () => { + const query = gql` + query { + friendList @stream(initialCount: 1) { + id + name } - `; + } + `; - const client = new ApolloClient({ - cache: new InMemoryCache(), - link: createLink({ - friendList: () => { - return friends.map((f, i) => { - if (i === 1) { - return preventUnhandledRejection( - Promise.reject(new Error("Could not get friend")) - ); - } + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: createLink({ + friendList: () => { + return friends.map((f, i) => { + if (i === 1) { + return preventUnhandledRejection( + Promise.reject(new Error("Could not get friend")) + ); + } - return { - id: f.id, - name: wait(i * 50).then(() => f.name), - }; - }); - }, - }), - incrementalHandler: new GraphQL17Alpha9Handler(), - }); + return { + id: f.id, + name: wait(i * 50).then(() => f.name), + }; + }); + }, + }), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); - using _disabledAct = disableActEnvironment(); - const { takeRender } = await renderSuspenseHook( - () => useSuspenseQuery(query, { errorPolicy: "all" }), - { wrapper: createClientWrapper(client) } - ); + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + () => useSuspenseQuery(query, { errorPolicy: "all" }), + { wrapper: createClientWrapper(client) } + ); - { - const { renderedComponents } = await takeRender(); + { + const { renderedComponents } = await takeRender(); - expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); - } + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } - { - const { snapshot, renderedComponents } = await takeRender(); + { + const { snapshot, renderedComponents } = await takeRender(); - expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); - 
expect(snapshot).toStrictEqualTyped({ - data: markAsStreaming({ + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }, null], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: new CombinedGraphQLErrors({ + data: { friendList: [{ __typename: "Friend", id: "1", name: "Luke" }, null], - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: new CombinedGraphQLErrors({ - data: { - friendList: [{ __typename: "Friend", id: "1", name: "Luke" }, null], - }, - errors: [ - { message: "Could not get friend", path: ["friendList", 1] }, - ], - }), - }); - } + }, + errors: [{ message: "Could not get friend", path: ["friendList", 1] }], + }), + }); + } - { - const { snapshot, renderedComponents } = await takeRender(); + { + const { snapshot, renderedComponents } = await takeRender(); - expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); - expect(snapshot).toStrictEqualTyped({ + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + null, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + dataState: "complete", + networkStatus: NetworkStatus.error, + error: new CombinedGraphQLErrors({ data: { friendList: [ { __typename: "Friend", id: "1", name: "Luke" }, @@ -1389,107 +1395,91 @@ test.failing( { __typename: "Friend", id: "3", name: "Leia" }, ], }, - dataState: "complete", - networkStatus: NetworkStatus.ready, - error: new CombinedGraphQLErrors({ - data: { - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - null, - { __typename: "Friend", id: "3", name: "Leia" }, - ], - }, - errors: [ - { message: "Could not get friend", path: ["friendList", 1] }, - ], - }), - }); - } - - await expect(takeRender).not.toRerender(); + errors: [{ message: "Could not get friend", path: ["friendList", 1] }], + }), + }); } -); -test.failing( - "adds partial data and discards errors returned in incremental chunks with errorPolicy set to `ignore`", - async () => { - const query = gql` - query { - friendList @stream(initialCount: 1) { - id - name - } - } - `; + await expect(takeRender).not.toRerender(); +}); - const client = new ApolloClient({ - cache: new InMemoryCache(), - link: createLink({ - friendList: () => { - return friends.map((f, i) => { - if (i === 1) { - return preventUnhandledRejection( - Promise.reject(new Error("Could not get friend")) - ); - } +test("adds partial data and discards errors returned in incremental chunks with errorPolicy set to `ignore`", async () => { + const query = gql` + query { + friendList @stream(initialCount: 1) { + id + name + } + } + `; - return { - id: f.id, - name: wait(i * 50).then(() => f.name), - }; - }); - }, - }), - incrementalHandler: new GraphQL17Alpha9Handler(), - }); + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: createLink({ + friendList: () => { + return friends.map((f, i) => { + if (i === 1) { + return preventUnhandledRejection( + Promise.reject(new Error("Could not get friend")) + ); + } - using _disabledAct = disableActEnvironment(); - const { takeRender } = await renderSuspenseHook( - () => useSuspenseQuery(query, { errorPolicy: "ignore" }), - { wrapper: createClientWrapper(client) } - ); + return { + id: f.id, + name: wait(i * 50).then(() => f.name), + }; + }); + }, + }), + 
incrementalHandler: new GraphQL17Alpha9Handler(), + }); - { - const { renderedComponents } = await takeRender(); + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + () => useSuspenseQuery(query, { errorPolicy: "ignore" }), + { wrapper: createClientWrapper(client) } + ); - expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); - } + { + const { renderedComponents } = await takeRender(); - { - const { snapshot, renderedComponents } = await takeRender(); + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } - expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); - expect(snapshot).toStrictEqualTyped({ - data: markAsStreaming({ - friendList: [{ __typename: "Friend", id: "1", name: "Luke" }, null], - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: undefined, - }); - } + { + const { snapshot, renderedComponents } = await takeRender(); - { - const { snapshot, renderedComponents } = await takeRender(); + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }, null], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } - expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); - expect(snapshot).toStrictEqualTyped({ - data: { - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - null, - { __typename: "Friend", id: "3", name: "Leia" }, - ], - }, - dataState: "complete", - networkStatus: NetworkStatus.ready, - error: undefined, - }); - } + { + const { snapshot, renderedComponents } = await takeRender(); - await expect(takeRender).not.toRerender(); + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + null, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); } -); + + await expect(takeRender).not.toRerender(); +}); test.failing( "can refetch and respond to cache updates after encountering an error in an incremental chunk for a streamed query when `errorPolicy` is `all`", From f58d7241efd2c901a035d40a3149f79502eb588c Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Wed, 10 Sep 2025 23:20:59 -0600 Subject: [PATCH 167/254] Use subject to control last test --- .../streamGraphQL17Alpha9.test.tsx | 306 +++++++++--------- 1 file changed, 154 insertions(+), 152 deletions(-) diff --git a/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx b/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx index 7a4911482a7..a6524b0384d 100644 --- a/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx +++ b/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx @@ -1481,161 +1481,150 @@ test("adds partial data and discards errors returned in incremental chunks with await expect(takeRender).not.toRerender(); }); -test.failing( - "can refetch and respond to cache updates after encountering an error in an incremental chunk for a streamed query when `errorPolicy` is `all`", - async () => { - let returnError = true; - const query = gql` - query { - friendList @stream(initialCount: 1) { - id - name - } +test("can refetch and respond to cache updates after encountering an error in an incremental chunk 
for a streamed query when `errorPolicy` is `all`", async () => { + let subject!: Subject | Friend>; + const query = gql` + query { + friendList @stream(initialCount: 1) { + id + name } - `; + } + `; - const client = new ApolloClient({ - cache: new InMemoryCache(), - link: createLink({ - friendList: () => { - return friends.map((f, i) => { - if (i === 1 && returnError) { - return preventUnhandledRejection( - Promise.reject(new Error("Could not get friend")) - ); - } - - return { - id: f.id, - name: wait(i * 50).then(() => f.name), - }; - }); - }, - }), - incrementalHandler: new GraphQL17Alpha9Handler(), - }); + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: createLink({ + friendList: async () => { + const iterable = asyncIterableSubject | Friend>(); + subject = iterable.subject; - using _disabledAct = disableActEnvironment(); - const { takeRender, getCurrentSnapshot } = await renderSuspenseHook( - () => useSuspenseQuery(query, { errorPolicy: "all" }), - { wrapper: createClientWrapper(client) } - ); + return iterable.stream; + }, + }), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); - { - const { renderedComponents } = await takeRender(); + using _disabledAct = disableActEnvironment(); + const { takeRender, getCurrentSnapshot } = await renderSuspenseHook( + () => useSuspenseQuery(query, { errorPolicy: "all" }), + { wrapper: createClientWrapper(client) } + ); - expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); - } + { + const { renderedComponents } = await takeRender(); - { - const { snapshot, renderedComponents } = await takeRender(); + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } - expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); - expect(snapshot).toStrictEqualTyped({ - data: markAsStreaming({ + subject.next(friends[0]); + subject.next(Promise.reject(new Error("Could not get friend"))); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }, null], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: new CombinedGraphQLErrors({ + data: { friendList: [{ __typename: "Friend", id: "1", name: "Luke" }, null], - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: new CombinedGraphQLErrors({ - data: { - friendList: [{ __typename: "Friend", id: "1", name: "Luke" }, null], - }, - errors: [ - { message: "Could not get friend", path: ["friendList", 1] }, - ], - }), - }); - } + }, + errors: [{ message: "Could not get friend", path: ["friendList", 1] }], + }), + }); + } - { - const { snapshot, renderedComponents } = await takeRender(); + subject.next(friends[2]); + subject.complete(); - expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); - expect(snapshot).toStrictEqualTyped({ - data: markAsStreaming({ + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + null, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }), + dataState: "complete", + networkStatus: NetworkStatus.error, + error: new CombinedGraphQLErrors({ + data: { friendList: [ { __typename: "Friend", id: "1", name: "Luke" }, null, { __typename: "Friend", id: 
"3", name: "Leia" }, ], - }), - dataState: "complete", - networkStatus: NetworkStatus.ready, - error: new CombinedGraphQLErrors({ - data: { - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - null, - { __typename: "Friend", id: "3", name: "Leia" }, - ], - }, - errors: [ - { message: "Could not get friend", path: ["friendList", 1] }, - ], - }), - }); - } + }, + errors: [{ message: "Could not get friend", path: ["friendList", 1] }], + }), + }); + } - returnError = false; - const refetchPromise = getCurrentSnapshot().refetch(); + const refetchPromise = getCurrentSnapshot().refetch(); - { - const { renderedComponents } = await takeRender(); + { + const { renderedComponents } = await takeRender(); - expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); - } + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } - { - const { snapshot, renderedComponents } = await takeRender(); + subject.next(friends[0]); - expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); - expect(snapshot).toStrictEqualTyped({ - data: markAsStreaming({ - friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: undefined, - }); - } + { + const { snapshot, renderedComponents } = await takeRender(); - { - const { snapshot, renderedComponents } = await takeRender(); + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + null, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } - expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); - expect(snapshot).toStrictEqualTyped({ - data: markAsStreaming({ - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - { __typename: "Friend", id: "2", name: "Han" }, - ], - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: undefined, - }); - } + subject.next(friends[1]); - { - const { snapshot, renderedComponents } = await takeRender(); + { + const { snapshot, renderedComponents } = await takeRender(); - expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); - expect(snapshot).toStrictEqualTyped({ - data: { - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3", name: "Leia" }, - ], - }, - dataState: "complete", - networkStatus: NetworkStatus.ready, - error: undefined, - }); - } + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + subject.next(friends[2]); + subject.complete(); - await expect(refetchPromise).resolves.toStrictEqualTyped({ + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ data: { friendList: [ { __typename: "Friend", id: "1", name: "Luke" }, @@ -1643,33 +1632,46 @@ test.failing( { __typename: "Friend", id: "3", name: "Leia" }, ], }, + dataState: "complete", + 
networkStatus: NetworkStatus.ready, + error: undefined, }); + } - client.cache.updateQuery({ query }, (data) => ({ + await expect(refetchPromise).resolves.toStrictEqualTyped({ + data: { friendList: [ - { ...data.friendList[0], name: "Luke (updated)" }, - ...data.friendList.slice(1), + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, ], - })); + }, + }); - { - const { snapshot, renderedComponents } = await takeRender(); + client.cache.updateQuery({ query }, (data) => ({ + friendList: [ + { ...data.friendList[0], name: "Luke (updated)" }, + ...data.friendList.slice(1), + ], + })); - expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); - expect(snapshot).toStrictEqualTyped({ - data: { - friendList: [ - { __typename: "Friend", id: "1", name: "Luke (updated)" }, - { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3", name: "Leia" }, - ], - }, - dataState: "complete", - networkStatus: NetworkStatus.ready, - error: undefined, - }); - } + { + const { snapshot, renderedComponents } = await takeRender(); - await expect(takeRender).not.toRerender(); + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke (updated)" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); } -); + + await expect(takeRender).not.toRerender(); +}); From 52aff0428714dd243f6a37db33bc7d2b2e1f390e Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Wed, 10 Sep 2025 23:24:04 -0600 Subject: [PATCH 168/254] Use test helpers --- .../streamGraphQL17Alpha9.test.tsx | 46 +++++++------------ 1 file changed, 16 insertions(+), 30 deletions(-) diff --git a/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx b/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx index a6524b0384d..dd3aafc5d4b 100644 --- a/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx +++ b/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx @@ -1312,6 +1312,7 @@ test("discards partial data and throws errors returned in incremental chunks", a }); test("adds partial data and does not throw errors returned in incremental chunks but returns them in `error` property with errorPolicy set to `all`", async () => { + const { stream, subject } = asyncIterableSubject(); const query = gql` query { friendList @stream(initialCount: 1) { @@ -1323,22 +1324,7 @@ test("adds partial data and does not throw errors returned in incremental chunks const client = new ApolloClient({ cache: new InMemoryCache(), - link: createLink({ - friendList: () => { - return friends.map((f, i) => { - if (i === 1) { - return preventUnhandledRejection( - Promise.reject(new Error("Could not get friend")) - ); - } - - return { - id: f.id, - name: wait(i * 50).then(() => f.name), - }; - }); - }, - }), + link: createLink({ friendList: () => stream }), incrementalHandler: new GraphQL17Alpha9Handler(), }); @@ -1354,6 +1340,9 @@ test("adds partial data and does not throw errors returned in incremental chunks expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); } + subject.next(friends[0]); + subject.next(Promise.reject(new Error("Could not get friend"))); + { const { snapshot, renderedComponents } = await 
takeRender(); @@ -1373,6 +1362,9 @@ test("adds partial data and does not throw errors returned in incremental chunks }); } + subject.next(friends[2]); + subject.complete(); + { const { snapshot, renderedComponents } = await takeRender(); @@ -1404,6 +1396,7 @@ test("adds partial data and does not throw errors returned in incremental chunks }); test("adds partial data and discards errors returned in incremental chunks with errorPolicy set to `ignore`", async () => { + const { stream, subject } = asyncIterableSubject>(); const query = gql` query { friendList @stream(initialCount: 1) { @@ -1416,20 +1409,7 @@ test("adds partial data and discards errors returned in incremental chunks with const client = new ApolloClient({ cache: new InMemoryCache(), link: createLink({ - friendList: () => { - return friends.map((f, i) => { - if (i === 1) { - return preventUnhandledRejection( - Promise.reject(new Error("Could not get friend")) - ); - } - - return { - id: f.id, - name: wait(i * 50).then(() => f.name), - }; - }); - }, + friendList: () => stream, }), incrementalHandler: new GraphQL17Alpha9Handler(), }); @@ -1446,6 +1426,9 @@ test("adds partial data and discards errors returned in incremental chunks with expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); } + subject.next(friends[0]); + subject.next(Promise.reject(new Error("Could not get friend"))); + { const { snapshot, renderedComponents } = await takeRender(); @@ -1460,6 +1443,9 @@ test("adds partial data and discards errors returned in incremental chunks with }); } + subject.next(friends[2]); + subject.complete(); + { const { snapshot, renderedComponents } = await takeRender(); From d4dca15da8ca650b6499926ab9664eb1f1572f2c Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Wed, 10 Sep 2025 23:34:27 -0600 Subject: [PATCH 169/254] Fix comment --- .../__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx b/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx index dd3aafc5d4b..bfb3e9fcdb6 100644 --- a/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx +++ b/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx @@ -823,7 +823,7 @@ test("incrementally renders data returned after skipping a streamed query", asyn }); // TODO: This test is a bit of a lie. `fetchMore` should incrementally -// rerender when using `@defer` but there is currently a bug in the core +// rerender when using `@stream` but there is currently a bug in the core // implementation that prevents updates until the final result is returned. // This test reflects the behavior as it exists today, but will need // to be updated once the core bug is fixed. 
From 65d04e5efa3c8d5f8e4da97e33425f5ffd408413 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Wed, 10 Sep 2025 23:34:47 -0600 Subject: [PATCH 170/254] Remove unused import --- .../__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx | 1 - 1 file changed, 1 deletion(-) diff --git a/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx b/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx index bfb3e9fcdb6..cba3e80d7fa 100644 --- a/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx +++ b/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx @@ -30,7 +30,6 @@ import { wait, } from "@apollo/client/testing/internal"; import { offsetLimitPagination } from "@apollo/client/utilities"; -import { preventUnhandledRejection } from "@apollo/client/utilities/internal"; import { invariant } from "@apollo/client/utilities/invariant"; async function renderSuspenseHook< From 8a62e8a19b64e98583cea17a864456b51ab37f9b Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Wed, 10 Sep 2025 23:39:45 -0600 Subject: [PATCH 171/254] Change expect to assert to fix ts error --- .../__tests__/defer20220824/defer.test.ts | 34 +++++++++---------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/src/incremental/handlers/__tests__/defer20220824/defer.test.ts b/src/incremental/handlers/__tests__/defer20220824/defer.test.ts index 2d416198617..7ed32d8c991 100644 --- a/src/incremental/handlers/__tests__/defer20220824/defer.test.ts +++ b/src/incremental/handlers/__tests__/defer20220824/defer.test.ts @@ -132,7 +132,7 @@ describe("graphql-js test cases", () => { { const { value: chunk, done } = (await incoming.next())!; assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -146,7 +146,7 @@ describe("graphql-js test cases", () => { { const { value: chunk, done } = (await incoming.next())!; assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -200,7 +200,7 @@ describe("graphql-js test cases", () => { { const { value: chunk, done } = (await incoming.next())!; assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: {}, @@ -210,7 +210,7 @@ describe("graphql-js test cases", () => { { const { value: chunk, done } = (await incoming.next())!; assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -241,7 +241,7 @@ describe("graphql-js test cases", () => { { const { value: chunk, done } = (await incoming.next())!; assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: {}, @@ -251,7 +251,7 @@ describe("graphql-js test cases", () => { { const { value: chunk, done } = (await incoming.next())!; assert(!done); - 
expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -295,7 +295,7 @@ describe("graphql-js test cases", () => { { const { value: chunk, done } = (await incoming.next())!; assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -310,7 +310,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = (await incoming.next())!; assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -346,7 +346,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = (await incoming.next())!; assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -359,7 +359,7 @@ describe("graphql-js test cases", () => { const { value: chunk, done } = (await incoming.next())!; assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -399,7 +399,7 @@ describe("graphql-js test cases", () => { { const { value: chunk, done } = (await incoming.next())!; assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { hero: { id: "1" } }, @@ -409,7 +409,7 @@ describe("graphql-js test cases", () => { { const { value: chunk, done } = (await incoming.next())!; assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -448,7 +448,7 @@ describe("graphql-js test cases", () => { { const { value: chunk, done } = (await incoming.next())!; assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { hero: { id: "1" } }, @@ -458,7 +458,7 @@ describe("graphql-js test cases", () => { { const { value: chunk, done } = (await incoming.next())!; assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -529,7 +529,7 @@ describe("graphql-js test cases", () => { { const { value: chunk, done } = (await incoming.next())!; assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(false); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { hero: { id: "1" } }, @@ -539,7 +539,7 @@ describe("graphql-js test cases", () => 
{ { const { value: chunk, done } = (await incoming.next())!; assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { @@ -555,7 +555,7 @@ describe("graphql-js test cases", () => { { const { value: chunk, done } = (await incoming.next())!; assert(!done); - expect(handler.isIncrementalResult(chunk)).toBe(true); + assert(handler.isIncrementalResult(chunk)); expect(hasIncrementalChunks(chunk)).toBe(true); expect(request.handle(undefined, chunk)).toStrictEqualTyped({ data: { From a439d54a8f19951c32fd9d9facfc5ed70845202b Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Wed, 10 Sep 2025 23:40:24 -0600 Subject: [PATCH 172/254] Update exports snapshot --- src/__tests__/__snapshots__/exports.ts.snap | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/__tests__/__snapshots__/exports.ts.snap b/src/__tests__/__snapshots__/exports.ts.snap index f043e84aad4..2dd66642204 100644 --- a/src/__tests__/__snapshots__/exports.ts.snap +++ b/src/__tests__/__snapshots__/exports.ts.snap @@ -357,11 +357,16 @@ Array [ "ObservableStream", "actAsync", "addDelayToMocks", + "asyncIterableSubject", "createClientWrapper", "createMockWrapper", "createOperationWithDefaultContext", "enableFakeTimers", + "executeSchemaGraphQL17Alpha2", + "executeSchemaGraphQL17Alpha9", "executeWithDefaultContext", + "friendListSchemaGraphQL17Alpha2", + "friendListSchemaGraphQL17Alpha9", "markAsStreaming", "mockDefer20220824", "mockDeferStreamGraphQL17Alpha9", From ad7a87e8a70feb084615a1e5d009fdb15e0be079 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 11 Sep 2025 09:59:48 -0600 Subject: [PATCH 173/254] Copy useSuspenseQuery stream tests for older spec --- .../streamDefer20220824.test.tsx | 1662 +++++++++++++++++ 1 file changed, 1662 insertions(+) create mode 100644 src/react/hooks/__tests__/useSuspenseQuery/streamDefer20220824.test.tsx diff --git a/src/react/hooks/__tests__/useSuspenseQuery/streamDefer20220824.test.tsx b/src/react/hooks/__tests__/useSuspenseQuery/streamDefer20220824.test.tsx new file mode 100644 index 00000000000..88f448a4837 --- /dev/null +++ b/src/react/hooks/__tests__/useSuspenseQuery/streamDefer20220824.test.tsx @@ -0,0 +1,1662 @@ +import type { RenderOptions } from "@testing-library/react"; +import { + createRenderStream, + disableActEnvironment, + useTrackRenders, +} from "@testing-library/react-render-stream"; +import React, { Suspense } from "react"; +import { ErrorBoundary } from "react-error-boundary"; +import type { Subject } from "rxjs"; +import { delay, from, throwError } from "rxjs"; + +import type { ErrorLike, OperationVariables } from "@apollo/client"; +import { + ApolloClient, + ApolloLink, + CombinedGraphQLErrors, + gql, + InMemoryCache, + NetworkStatus, +} from "@apollo/client"; +import { Defer20220824Handler } from "@apollo/client/incremental"; +import { useSuspenseQuery } from "@apollo/client/react"; +import { + asyncIterableSubject, + createClientWrapper, + executeSchemaGraphQL17Alpha2, + friendListSchemaGraphQL17Alpha2, + markAsStreaming, + spyOnConsole, + wait, +} from "@apollo/client/testing/internal"; +import { offsetLimitPagination } from "@apollo/client/utilities"; +import { invariant } from "@apollo/client/utilities/invariant"; + +async function renderSuspenseHook< + TData, + TVariables extends OperationVariables, + Props = never, +>( + renderHook: ( + props: Props extends never ? 
undefined : Props + ) => useSuspenseQuery.Result, + options: Pick & { initialProps?: Props } +) { + function UseSuspenseQuery({ props }: { props: Props | undefined }) { + useTrackRenders({ name: "useSuspenseQuery" }); + replaceSnapshot(renderHook(props as any)); + + return null; + } + + function SuspenseFallback() { + useTrackRenders({ name: "SuspenseFallback" }); + + return null; + } + + function ErrorFallback() { + useTrackRenders({ name: "ErrorBoundary" }); + + return null; + } + + function App({ props }: { props: Props | undefined }) { + return ( + }> + replaceSnapshot({ error })} + > + + + + ); + } + + const { render, takeRender, replaceSnapshot, getCurrentRender } = + createRenderStream< + useSuspenseQuery.Result | { error: ErrorLike } + >({ skipNonTrackingRenders: true }); + + const utils = await render(, options); + + function rerender(props: Props) { + return utils.rerender(); + } + + function getCurrentSnapshot() { + const { snapshot } = getCurrentRender(); + + invariant("data" in snapshot, "Snapshot is not a hook snapshot"); + + return snapshot; + } + + return { getCurrentSnapshot, takeRender, rerender }; +} + +function createLink(rootValue?: unknown) { + return new ApolloLink((operation) => { + return from( + executeSchemaGraphQL17Alpha2( + friendListSchemaGraphQL17Alpha2, + operation.query, + rootValue + ) + ); + }); +} + +const friends = [ + { name: "Luke", id: 1 }, + { name: "Han", id: 2 }, + { name: "Leia", id: 3 }, +]; + +type Friend = (typeof friends)[number]; + +test("suspends streamed queries until initial chunk loads then streams in data as it loads", async () => { + const { stream, subject } = asyncIterableSubject(); + + const query = gql` + query { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: createLink({ + friendList: async () => { + return stream; + }, + }), + incrementalHandler: new Defer20220824Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + () => useSuspenseQuery(query), + { wrapper: createClientWrapper(client) } + ); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + subject.next(friends[0]); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + subject.next(friends[1]); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + ], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + subject.next(friends[2]); + subject.complete(); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + 
}, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + await expect(takeRender).not.toRerender(); +}); + +test.each([ + "cache-first", + "network-only", + "no-cache", + "cache-and-network", +])( + 'suspends streamed queries until initial chunk loads then streams in data as it loads when using a "%s" fetch policy', + async (fetchPolicy) => { + const { stream, subject } = asyncIterableSubject(); + + const query = gql` + query { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: createLink({ friendList: () => stream }), + incrementalHandler: new Defer20220824Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + () => useSuspenseQuery(query, { fetchPolicy }), + { wrapper: createClientWrapper(client) } + ); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + subject.next(friends[0]); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + subject.next(friends[1]); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + ], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + subject.next(friends[2]); + subject.complete(); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + await expect(takeRender).not.toRerender(); + } +); + +test('does not suspend streamed queries with data in the cache and using a "cache-first" fetch policy', async () => { + const query = gql` + query { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + const cache = new InMemoryCache(); + + cache.writeQuery({ + // Use a query without `@stream` to ensure it doesn't affect the cache + query: gql` + query { + friendList { + id + name + } + } + `, + data: { + friendList: friends.map((friend) => ({ + __typename: "Friend", + ...friend, + })), + }, + }); + + const client = new ApolloClient({ + cache, + link: ApolloLink.empty(), + incrementalHandler: new Defer20220824Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + () => useSuspenseQuery(query, { fetchPolicy: "cache-first" }), + { + wrapper: createClientWrapper(client), + } + ); + + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: 
friends.map((friend) => ({ + __typename: "Friend", + ...friend, + })), + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + + await expect(takeRender).not.toRerender(); +}); + +// TODO: Determine how we handle partial data with streamed responses. While this +// works as expected and renders correctly, this also emits missing field +// warnings in the console when writing the result to the cache since array items +// with partial cache data are still included for items that haven't streamed in +// yet. +test('does not suspend streamed queries with partial data in the cache and using a "cache-first" fetch policy with `returnPartialData`', async () => { + using _TODO_REMOVE_ME_AFTER_DECIDING_COMMENT = spyOnConsole("error"); + const { subject, stream } = asyncIterableSubject(); + + const query = gql` + query { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + const cache = new InMemoryCache(); + + // We are intentionally writing partial data to the cache. Supress console + // warnings to avoid unnecessary noise in the test. + { + // using _consoleSpy = spyOnConsole("error"); + cache.writeQuery({ + query, + data: { + friendList: friends.map((friend) => ({ + __typename: "Friend", + id: String(friend.id), + })), + }, + }); + } + + const client = new ApolloClient({ + cache, + link: createLink({ friendList: () => stream }), + incrementalHandler: new Defer20220824Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + () => + useSuspenseQuery(query, { + fetchPolicy: "cache-first", + returnPartialData: true, + }), + { + wrapper: createClientWrapper(client), + } + ); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: friends.map((friend) => ({ + __typename: "Friend", + id: String(friend.id), + })), + }, + dataState: "partial", + networkStatus: NetworkStatus.loading, + error: undefined, + }); + } + + subject.next(friends[0]); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2" }, + { __typename: "Friend", id: "3" }, + ], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + subject.next(friends[1]); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3" }, + ], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + subject.next(friends[2]); + subject.complete(); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }), + dataState: "complete", + networkStatus: 
NetworkStatus.ready, + error: undefined, + }); + } + + await expect(takeRender).not.toRerender(); +}); + +test('does not suspend streamed queries with data in the cache and using a "cache-and-network" fetch policy', async () => { + const { stream, subject } = asyncIterableSubject(); + const query = gql` + query { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: createLink({ friendList: () => stream }), + incrementalHandler: new Defer20220824Handler(), + }); + + client.writeQuery({ + query, + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Cached Luke" }, + { __typename: "Friend", id: "2", name: "Cached Han" }, + { __typename: "Friend", id: "3", name: "Cached Leia" }, + ], + }, + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + () => useSuspenseQuery(query, { fetchPolicy: "cache-and-network" }), + { wrapper: createClientWrapper(client) } + ); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Cached Luke" }, + { __typename: "Friend", id: "2", name: "Cached Han" }, + { __typename: "Friend", id: "3", name: "Cached Leia" }, + ], + }, + dataState: "complete", + networkStatus: NetworkStatus.loading, + error: undefined, + }); + } + + subject.next(friends[0]); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Cached Han" }, + { __typename: "Friend", id: "3", name: "Cached Leia" }, + ], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + subject.next(friends[1]); + subject.next(friends[2]); + subject.complete(); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + await expect(takeRender).not.toRerender(); +}); + +test("incrementally rerenders data returned by a `refetch` for a streamed query", async () => { + let subject!: Subject; + const query = gql` + query { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + const client = new ApolloClient({ + link: createLink({ + friendList: () => { + const iterable = asyncIterableSubject(); + subject = iterable.subject; + + return iterable.stream; + }, + }), + cache: new InMemoryCache(), + incrementalHandler: new Defer20220824Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender, getCurrentSnapshot } = await renderSuspenseHook( + () => useSuspenseQuery(query), + { wrapper: createClientWrapper(client) } + ); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + subject.next(friends[0]); + + { + const { snapshot, renderedComponents } = await takeRender(); + + 
expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + subject.next(friends[1]); + subject.next(friends[2]); + subject.complete(); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + const refetchPromise = getCurrentSnapshot().refetch(); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + subject.next({ id: 1, name: "Luke (refetch)" }); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke (refetch)" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + subject.next({ id: 2, name: "Han (refetch)" }); + subject.next({ id: 3, name: "Leia (refetch)" }); + subject.complete(); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke (refetch)" }, + { __typename: "Friend", id: "2", name: "Han (refetch)" }, + { __typename: "Friend", id: "3", name: "Leia (refetch)" }, + ], + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + await expect(refetchPromise).resolves.toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke (refetch)" }, + { __typename: "Friend", id: "2", name: "Han (refetch)" }, + { __typename: "Friend", id: "3", name: "Leia (refetch)" }, + ], + }, + }); +}); + +test("incrementally renders data returned after skipping a streamed query", async () => { + const { stream, subject } = asyncIterableSubject(); + const query = gql` + query { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + const client = new ApolloClient({ + link: createLink({ friendList: () => stream }), + cache: new InMemoryCache(), + incrementalHandler: new Defer20220824Handler(), + }); + + using __disabledAct = disableActEnvironment(); + const { takeRender, rerender } = await renderSuspenseHook( + ({ skip }) => useSuspenseQuery(query, { skip }), + { + initialProps: { skip: true }, + wrapper: createClientWrapper(client), + } + ); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: undefined, + dataState: "empty", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + await rerender({ skip: false }); + + { + const { renderedComponents } = await takeRender(); + + 
expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + subject.next(friends[0]); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + subject.next(friends[1]); + subject.next(friends[2]); + subject.complete(); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + await expect(takeRender).not.toRerender(); +}); + +// TODO: This test is a bit of a lie. `fetchMore` should incrementally +// rerender when using `@stream` but there is currently a bug in the core +// implementation that prevents updates until the final result is returned. +// This test reflects the behavior as it exists today, but will need +// to be updated once the core bug is fixed. +// +// NOTE: A duplicate it.failng test has been added right below this one with +// the expected behavior added in (i.e. the commented code in this test). Once +// the core bug is fixed, this test can be removed in favor of the other test. +// +// https://github.com/apollographql/apollo-client/issues/11034 +test.failing( + "rerenders data returned by `fetchMore` for a streamed query", + async () => { + let subject!: Subject; + const query = gql` + query ($offset: Int) { + friendList(offset: $offset) @stream(initialCount: 1) { + id + name + } + } + `; + + const cache = new InMemoryCache({ + typePolicies: { + Query: { + fields: { + greetings: offsetLimitPagination(), + }, + }, + }, + }); + + const client = new ApolloClient({ + link: createLink({ + friendList: () => { + const iterator = asyncIterableSubject(); + subject = iterator.subject; + + return iterator.stream; + }, + }), + cache, + incrementalHandler: new Defer20220824Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender, getCurrentSnapshot } = await renderSuspenseHook( + () => useSuspenseQuery(query, { variables: { offset: 0 } }), + { wrapper: createClientWrapper(client) } + ); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + subject.next(friends[0]); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + subject.next(friends[1]); + subject.complete(); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + ], + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + 
error: undefined, + }); + } + + const fetchMorePromise = getCurrentSnapshot().fetchMore({ + variables: { offset: 2 }, + }); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + subject.next(friends[2]); + + // TODO: Re-enable once the core bug is fixed + // { + // const { snapshot, renderedComponents } = await takeRender(); + // + // expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + // expect(snapshot).toStrictEqualTyped({ + // data: markAsStreaming({ + // friendList: [ + // { __typename: "Friend", id: "1", name: "Luke" }, + // { __typename: "Friend", id: "2", name: "Han" }, + // { __typename: "Friend", id: "3", name: "Leia" }, + // ], + // }), + // dataState: "streaming", + // networkStatus: NetworkStatus.streaming, + // error: undefined, + // }); + // } + + await wait(0); + subject.next({ id: 4, name: "Chewbacca" }); + subject.complete(); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + { __typename: "Friend", id: "4", name: "Chewbacca" }, + ], + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + await expect(fetchMorePromise).resolves.toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "3", name: "Leia" }, + { __typename: "Friend", id: "4", name: "Chewbacca" }, + ], + }, + }); + + await expect(takeRender).not.toRerender(); + } +); + +// TODO: This is a duplicate of the test above, but with the expected behavior +// added (hence the `it.failing`). Remove the previous test once issue #11034 +// is fixed. 
+// +// https://github.com/apollographql/apollo-client/issues/11034 +test.failing( + "incrementally rerenders data returned by a `fetchMore` for a streamed query", + async () => { + let subject!: Subject; + const query = gql` + query ($offset: Int) { + friendList(offset: $offset) @stream(initialCount: 1) { + id + name + } + } + `; + + const cache = new InMemoryCache({ + typePolicies: { + Query: { + fields: { + greetings: offsetLimitPagination(), + }, + }, + }, + }); + + const client = new ApolloClient({ + link: createLink({ + friendList: () => { + const iterator = asyncIterableSubject(); + subject = iterator.subject; + + return iterator.stream; + }, + }), + cache, + incrementalHandler: new Defer20220824Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender, getCurrentSnapshot } = await renderSuspenseHook( + () => useSuspenseQuery(query, { variables: { offset: 0 } }), + { wrapper: createClientWrapper(client) } + ); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + subject.next(friends[0]); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + subject.next(friends[1]); + subject.complete(); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + ], + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + const fetchMorePromise = getCurrentSnapshot().fetchMore({ + variables: { offset: 2 }, + }); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + subject.next(friends[2]); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + await wait(0); + subject.next({ id: 4, name: "Chewbacca" }); + subject.complete(); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + { __typename: "Friend", id: "4", name: "Chewbacca" }, + ], + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + await expect(fetchMorePromise).resolves.toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "3", name: "Leia" }, + { __typename: "Friend", id: "4", name: "Chewbacca" }, + ], + }, + }); + + await expect(takeRender).not.toRerender(); + 
} +); + +test("throws network errors returned by streamed queries", async () => { + using _consoleSpy = spyOnConsole("error"); + + const query = gql` + query { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: new ApolloLink(() => { + return throwError(() => new Error("Could not fetch")).pipe(delay(20)); + }), + incrementalHandler: new Defer20220824Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + () => useSuspenseQuery(query), + { wrapper: createClientWrapper(client) } + ); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["ErrorBoundary"]); + expect(snapshot).toStrictEqualTyped({ + error: new Error("Could not fetch"), + }); + } + + await expect(takeRender).not.toRerender(); +}); + +test("throws graphql errors returned by streamed queries", async () => { + using _consoleSpy = spyOnConsole("error"); + + const query = gql` + query { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: createLink({ + friendList: async () => { + await wait(20); + throw new Error("Could not get friend list"); + }, + }), + incrementalHandler: new Defer20220824Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + () => useSuspenseQuery(query), + { wrapper: createClientWrapper(client) } + ); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["ErrorBoundary"]); + expect(snapshot).toStrictEqualTyped({ + error: new CombinedGraphQLErrors({ + data: { friendList: null }, + errors: [ + { message: "Could not get friend list", path: ["friendList"] }, + ], + }), + }); + } + + await expect(takeRender).not.toRerender(); +}); + +test("discards partial data and throws errors returned in incremental chunks", async () => { + const { stream, subject } = asyncIterableSubject(); + using _consoleSpy = spyOnConsole("error"); + + const query = gql` + query { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: createLink({ + friendList: async function* () { + for await (const friend of stream) { + if (friend.id === 2) { + throw new Error("Could not get friend"); + } + + yield friend; + } + }, + }), + incrementalHandler: new Defer20220824Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + () => useSuspenseQuery(query), + { wrapper: createClientWrapper(client) } + ); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + subject.next(friends[0]); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + 
+ subject.next(friends[1]); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["ErrorBoundary"]); + expect(snapshot).toStrictEqualTyped({ + error: new CombinedGraphQLErrors({ + data: { + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }, + errors: [{ message: "Could not get friend", path: ["friendList"] }], + }), + }); + } + + await expect(takeRender).not.toRerender(); +}); + +test("adds partial data and does not throw errors returned in incremental chunks but returns them in `error` property with errorPolicy set to `all`", async () => { + const { stream, subject } = asyncIterableSubject(); + const query = gql` + query { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: createLink({ friendList: () => stream }), + incrementalHandler: new Defer20220824Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + () => useSuspenseQuery(query, { errorPolicy: "all" }), + { wrapper: createClientWrapper(client) } + ); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + subject.next(friends[0]); + subject.next(Promise.reject(new Error("Could not get friend"))); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }, null], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: new CombinedGraphQLErrors({ + data: { + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }, null], + }, + errors: [{ message: "Could not get friend", path: ["friendList", 1] }], + }), + }); + } + + subject.next(friends[2]); + subject.complete(); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + null, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + dataState: "complete", + networkStatus: NetworkStatus.error, + error: new CombinedGraphQLErrors({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + null, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + errors: [{ message: "Could not get friend", path: ["friendList", 1] }], + }), + }); + } + + await expect(takeRender).not.toRerender(); +}); + +test("adds partial data and discards errors returned in incremental chunks with errorPolicy set to `ignore`", async () => { + const { stream, subject } = asyncIterableSubject>(); + const query = gql` + query { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: createLink({ + friendList: () => stream, + }), + incrementalHandler: new Defer20220824Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + () => useSuspenseQuery(query, { errorPolicy: "ignore" }), + { wrapper: createClientWrapper(client) } + ); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + subject.next(friends[0]); + 
subject.next(Promise.reject(new Error("Could not get friend"))); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }, null], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + subject.next(friends[2]); + subject.complete(); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + null, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + await expect(takeRender).not.toRerender(); +}); + +test("can refetch and respond to cache updates after encountering an error in an incremental chunk for a streamed query when `errorPolicy` is `all`", async () => { + let subject!: Subject | Friend>; + const query = gql` + query { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: createLink({ + friendList: async () => { + const iterable = asyncIterableSubject | Friend>(); + subject = iterable.subject; + + return iterable.stream; + }, + }), + incrementalHandler: new Defer20220824Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender, getCurrentSnapshot } = await renderSuspenseHook( + () => useSuspenseQuery(query, { errorPolicy: "all" }), + { wrapper: createClientWrapper(client) } + ); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + subject.next(friends[0]); + subject.next(Promise.reject(new Error("Could not get friend"))); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }, null], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: new CombinedGraphQLErrors({ + data: { + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }, null], + }, + errors: [{ message: "Could not get friend", path: ["friendList", 1] }], + }), + }); + } + + subject.next(friends[2]); + subject.complete(); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + null, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }), + dataState: "complete", + networkStatus: NetworkStatus.error, + error: new CombinedGraphQLErrors({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + null, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + errors: [{ message: "Could not get friend", path: ["friendList", 1] }], + }), + }); + } + + const refetchPromise = getCurrentSnapshot().refetch(); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + subject.next(friends[0]); + + { + const { snapshot, 
renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + null, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + subject.next(friends[1]); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + + subject.next(friends[2]); + subject.complete(); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + await expect(refetchPromise).resolves.toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + }); + + client.cache.updateQuery({ query }, (data) => ({ + friendList: [ + { ...data.friendList[0], name: "Luke (updated)" }, + ...data.friendList.slice(1), + ], + })); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke (updated)" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, + }); + } + + await expect(takeRender).not.toRerender(); +}); From 27bc2191732f8dfae9e09c4e4d46d71d99b07b9e Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 11 Sep 2025 11:03:31 -0600 Subject: [PATCH 174/254] Update details for older spec --- .../streamDefer20220824.test.tsx | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/src/react/hooks/__tests__/useSuspenseQuery/streamDefer20220824.test.tsx b/src/react/hooks/__tests__/useSuspenseQuery/streamDefer20220824.test.tsx index 88f448a4837..67c3f15d54c 100644 --- a/src/react/hooks/__tests__/useSuspenseQuery/streamDefer20220824.test.tsx +++ b/src/react/hooks/__tests__/useSuspenseQuery/streamDefer20220824.test.tsx @@ -1300,9 +1300,9 @@ test("discards partial data and throws errors returned in incremental chunks", a expect(snapshot).toStrictEqualTyped({ error: new CombinedGraphQLErrors({ data: { - friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }, null], }, - errors: [{ message: "Could not get friend", path: ["friendList"] }], + errors: [{ message: "Could not get friend", path: ["friendList", 1] }], }), }); } @@ -1340,7 +1340,7 @@ test("adds partial data and does 
not throw errors returned in incremental chunks } subject.next(friends[0]); - subject.next(Promise.reject(new Error("Could not get friend"))); + subject.next(new Error("Could not get friend")); { const { snapshot, renderedComponents } = await takeRender(); @@ -1395,7 +1395,7 @@ test("adds partial data and does not throw errors returned in incremental chunks }); test("adds partial data and discards errors returned in incremental chunks with errorPolicy set to `ignore`", async () => { - const { stream, subject } = asyncIterableSubject>(); + const { stream, subject } = asyncIterableSubject(); const query = gql` query { friendList @stream(initialCount: 1) { @@ -1426,7 +1426,7 @@ test("adds partial data and discards errors returned in incremental chunks with } subject.next(friends[0]); - subject.next(Promise.reject(new Error("Could not get friend"))); + subject.next(new Error("Could not get friend")); { const { snapshot, renderedComponents } = await takeRender(); @@ -1467,7 +1467,7 @@ test("adds partial data and discards errors returned in incremental chunks with }); test("can refetch and respond to cache updates after encountering an error in an incremental chunk for a streamed query when `errorPolicy` is `all`", async () => { - let subject!: Subject | Friend>; + let subject!: Subject; const query = gql` query { friendList @stream(initialCount: 1) { @@ -1480,8 +1480,8 @@ test("can refetch and respond to cache updates after encountering an error in an const client = new ApolloClient({ cache: new InMemoryCache(), link: createLink({ - friendList: async () => { - const iterable = asyncIterableSubject | Friend>(); + friendList: () => { + const iterable = asyncIterableSubject(); subject = iterable.subject; return iterable.stream; @@ -1503,7 +1503,7 @@ test("can refetch and respond to cache updates after encountering an error in an } subject.next(friends[0]); - subject.next(Promise.reject(new Error("Could not get friend"))); + subject.next(new Error("Could not get friend")); { const { snapshot, renderedComponents } = await takeRender(); From 77eb9c5cacb060bd1e4ede1bbe237d49a72da5ab Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 11 Sep 2025 11:11:49 -0600 Subject: [PATCH 175/254] Revert to older implementation --- src/incremental/handlers/graphql17Alpha9.ts | 16 ++-------------- 1 file changed, 2 insertions(+), 14 deletions(-) diff --git a/src/incremental/handlers/graphql17Alpha9.ts b/src/incremental/handlers/graphql17Alpha9.ts index 21545801e93..d17b73a578d 100644 --- a/src/incremental/handlers/graphql17Alpha9.ts +++ b/src/incremental/handlers/graphql17Alpha9.ts @@ -82,13 +82,6 @@ class IncrementalRequest { hasNext = true; - // `this.data` represents the merged results of all raw chunk data without - // cache data mixed in. This makes it easier to track incremental @stream - // chunks since they can be concatenated with the results streamed directly - // from the server, rather than concatenated with a cache list that might - // already have a non-zero length. Cache data is deep merged with this.data at - // the end to ensure this.data overwrites array indexes from increemntal - // chunks at the right location. 
private data: any = {}; private errors: GraphQLFormattedError[] = []; private extensions: Record = {}; @@ -99,10 +92,7 @@ class IncrementalRequest chunk: GraphQL17Alpha9Handler.Chunk ): FormattedExecutionResult { this.hasNext = chunk.hasNext; - - if ("data" in chunk) { - this.data = chunk.data; - } + this.data = cacheData; if (chunk.pending) { this.pending.push(...chunk.pending); @@ -153,9 +143,7 @@ class IncrementalRequest } } - const result: FormattedExecutionResult = { - data: deepMerge(cacheData, this.data), - }; + const result: FormattedExecutionResult = { data: this.data }; if (isNonEmptyArray(this.errors)) { result.errors = this.errors; From fbb2f38557026e7bb0e0298a0534a2727011484b Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 11 Sep 2025 11:11:59 -0600 Subject: [PATCH 176/254] Inline deepMerge --- src/incremental/handlers/graphql17Alpha9.ts | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/src/incremental/handlers/graphql17Alpha9.ts b/src/incremental/handlers/graphql17Alpha9.ts index d17b73a578d..d9652bccca1 100644 --- a/src/incremental/handlers/graphql17Alpha9.ts +++ b/src/incremental/handlers/graphql17Alpha9.ts @@ -158,7 +158,7 @@ class IncrementalRequest private merge(normalized: FormattedExecutionResult) { if (normalized.data !== undefined) { - this.data = deepMerge(this.data, normalized.data); + this.data = new DeepMerger().merge(this.data, normalized.data); } if (normalized.errors) { @@ -169,10 +169,6 @@ class IncrementalRequest } } -function deepMerge(target: T, source: T): T { - return new DeepMerger().merge(target, source); -} - /** * Provides handling for the incremental delivery specification implemented by * graphql.js version `17.0.0-alpha.9`. From 198279c585f900b27232a363853bbaa4c86ff972 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 11 Sep 2025 11:17:11 -0600 Subject: [PATCH 177/254] Fix missing default from change to shared function --- .../handlers/__tests__/graphql17Alpha9/defer.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/incremental/handlers/__tests__/graphql17Alpha9/defer.test.ts b/src/incremental/handlers/__tests__/graphql17Alpha9/defer.test.ts index 1d40bc7fc0a..885a428f267 100644 --- a/src/incremental/handlers/__tests__/graphql17Alpha9/defer.test.ts +++ b/src/incremental/handlers/__tests__/graphql17Alpha9/defer.test.ts @@ -151,7 +151,7 @@ function resolveOnNextTick(): Promise { function run( document: DocumentNode, - rootValue: unknown = {}, + rootValue: unknown = { hero }, enableEarlyExecution?: boolean ) { return executeSchemaGraphQL17Alpha9( From 356bfe78b8e6f3ebe3f800012acb5eb2035924dc Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 11 Sep 2025 12:53:15 -0600 Subject: [PATCH 178/254] Fix most cases of merging cache with streamed chunks --- src/incremental/handlers/graphql17Alpha9.ts | 35 +++++++++++++++++---- 1 file changed, 29 insertions(+), 6 deletions(-) diff --git a/src/incremental/handlers/graphql17Alpha9.ts b/src/incremental/handlers/graphql17Alpha9.ts index d9652bccca1..978e12eec51 100644 --- a/src/incremental/handlers/graphql17Alpha9.ts +++ b/src/incremental/handlers/graphql17Alpha9.ts @@ -86,6 +86,7 @@ class IncrementalRequest private errors: GraphQLFormattedError[] = []; private extensions: Record = {}; private pending: GraphQL17Alpha9Handler.PendingResult[] = []; + private mergedIndexes: Record = {}; handle( cacheData: TData | DeepPartial | null | undefined = this.data, @@ -96,6 +97,19 @@ class IncrementalRequest if (chunk.pending) { 
this.pending.push(...chunk.pending); + + if ("data" in chunk) { + for (const pending of chunk.pending) { + const dataAtPath = pending.path.reduce( + (data, key) => (data as any)[key], + chunk.data + ); + + if (Array.isArray(dataAtPath)) { + this.mergedIndexes[pending.id] = dataAtPath.length; + } + } + } } this.merge(chunk); @@ -110,12 +124,21 @@ class IncrementalRequest ); const path = pending.path.concat(incremental.subPath ?? []); - let data = - "items" in incremental ? - path - .reduce((data, key) => data[key], this.data) - .concat(incremental.items) - : incremental.data; + + let data: any; + if ("items" in incremental) { + const items = incremental.items as any[]; + const parent: any[] = []; + + for (let i = 0!; i < items.length; i++) { + parent[i + this.mergedIndexes[pending.id]] = items[i]; + } + + this.mergedIndexes[pending.id] += items.length; + data = parent; + } else { + data = incremental.data; + } for (let i = path.length - 1; i >= 0; i--) { const key = path[i]; From 1fdd487044cc9fd52b84838443026f58a0696557 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 11 Sep 2025 13:07:57 -0600 Subject: [PATCH 179/254] Fix issue with non-zero lists sent with defer chunk --- src/incremental/handlers/graphql17Alpha9.ts | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/src/incremental/handlers/graphql17Alpha9.ts b/src/incremental/handlers/graphql17Alpha9.ts index 978e12eec51..6dbe577a048 100644 --- a/src/incremental/handlers/graphql17Alpha9.ts +++ b/src/incremental/handlers/graphql17Alpha9.ts @@ -130,6 +130,15 @@ class IncrementalRequest const items = incremental.items as any[]; const parent: any[] = []; + if (!(pending.id in this.mergedIndexes)) { + const dataAtPath = pending.path.reduce( + (data, key) => (data as any)[key], + this.data + ); + + this.mergedIndexes[pending.id] = dataAtPath.length; + } + for (let i = 0!; i < items.length; i++) { parent[i + this.mergedIndexes[pending.id]] = items[i]; } From d366dc1719db040e2af80879ea9fd700ed426887 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 11 Sep 2025 13:08:40 -0600 Subject: [PATCH 180/254] Add additional test to check non-zero length array in defer chunk --- .../__tests__/graphql17Alpha9/stream.test.ts | 97 +++++++++++++++++++ 1 file changed, 97 insertions(+) diff --git a/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts b/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts index e3ae22def74..f535cba6ace 100644 --- a/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts +++ b/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts @@ -2127,6 +2127,103 @@ describe("graphql-js test cases", () => { } }); + // this test does not exist in the original test suite but added to ensure + // deferred non-empty lists are properly merged + it("Returns payloads in correct order when parent deferred fragment resolves slower than stream with > 0 initialCount", async () => { + const { promise: slowFieldPromise, resolve: resolveSlowField } = + promiseWithResolvers(); + + const query = gql` + query { + nestedObject { + ...DeferFragment @defer + } + } + fragment DeferFragment on NestedObject { + scalarField + nestedFriendList @stream(initialCount: 1) { + name + } + } + `; + + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + nestedObject: { + scalarField: () => slowFieldPromise, + async *nestedFriendList() { + yield await Promise.resolve(friends[0]); + yield await Promise.resolve(friends[1]); + }, + }, + 
}); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + nestedObject: {}, + }, + }); + expect(request.hasNext).toBe(true); + } + + resolveSlowField("slow"); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + nestedObject: { + scalarField: "slow", + nestedFriendList: [{ name: "Luke" }], + }, + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + nestedObject: { + scalarField: "slow", + nestedFriendList: [{ name: "Luke" }, { name: "Han" }], + }, + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + nestedObject: { + scalarField: "slow", + nestedFriendList: [{ name: "Luke" }, { name: "Han" }], + }, + }, + }); + expect(request.hasNext).toBe(false); + } + }); + it("Can @defer fields that are resolved after async iterable is complete", async () => { const { promise: slowFieldPromise, resolve: resolveSlowField } = promiseWithResolvers(); From 853b2d7734536653475db6654db72d8648731ba5 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 11 Sep 2025 13:22:29 -0600 Subject: [PATCH 181/254] Add failing test for merging cache data on defer chunk --- .../__tests__/graphql17Alpha9/stream.test.ts | 154 ++++++++++++++++++ 1 file changed, 154 insertions(+) diff --git a/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts b/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts index f535cba6ace..6ad0b17c939 100644 --- a/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts +++ b/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts @@ -2796,3 +2796,157 @@ test("properly merges streamed data into list with more items", async () => { expect(request.hasNext).toBe(false); } }); + +it("properly merges cache data when list is included in deferred chunk", async () => { + const { promise: slowFieldPromise, resolve: resolveSlowField } = + promiseWithResolvers(); + + const query = gql` + query { + nestedObject { + ...DeferFragment @defer + } + } + fragment DeferFragment on NestedObject { + scalarField + nestedFriendList @stream(initialCount: 0) { + name + } + } + `; + + const handler = new GraphQL17Alpha9Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + nestedObject: { + scalarField: () => slowFieldPromise, + async *nestedFriendList() { + yield await Promise.resolve(friends[0]); + yield await Promise.resolve(friends[1]); + }, + }, + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect( + request.handle( + { + nestedObject: { + scalarField: "cached", + nestedFriendList: [{ name: "Luke Cached" }, { name: "Han Cached" }], + }, + }, + chunk + ) + ).toStrictEqualTyped({ + data: { + nestedObject: { + scalarField: "cached", + nestedFriendList: [{ name: "Luke Cached" }, { name: "Han Cached" }], + }, + }, + }); + expect(request.hasNext).toBe(true); + } 
+ + resolveSlowField("slow"); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect( + request.handle( + { + nestedObject: { + scalarField: "cached", + nestedFriendList: [{ name: "Luke Cached" }, { name: "Han Cached" }], + }, + }, + chunk + ) + ).toStrictEqualTyped({ + data: { + nestedObject: { + scalarField: "slow", + nestedFriendList: [{ name: "Luke Cached" }, { name: "Han Cached" }], + }, + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + nestedObject: { + scalarField: "slow", + nestedFriendList: [{ name: "Luke" }, { name: "Han Cached" }], + }, + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect( + request.handle( + { + nestedObject: { + scalarField: "slow", + nestedFriendList: [{ name: "Luke" }, { name: "Han Cached" }], + }, + }, + chunk + ) + ).toStrictEqualTyped({ + data: { + nestedObject: { + scalarField: "slow", + nestedFriendList: [{ name: "Luke" }, { name: "Han" }], + }, + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect( + request.handle( + { + nestedObject: { + scalarField: "slow", + nestedFriendList: [{ name: "Luke" }, { name: "Han" }], + }, + }, + chunk + ) + ).toStrictEqualTyped({ + data: { + nestedObject: { + scalarField: "slow", + nestedFriendList: [{ name: "Luke" }, { name: "Han" }], + }, + }, + }); + expect(request.hasNext).toBe(false); + } +}); From 90c7e46a2666835e383d46ef72119ef3a057d269 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 11 Sep 2025 16:06:53 -0600 Subject: [PATCH 182/254] Fix the failing test --- src/incremental/handlers/graphql17Alpha9.ts | 38 +++++++++++++++------ 1 file changed, 27 insertions(+), 11 deletions(-) diff --git a/src/incremental/handlers/graphql17Alpha9.ts b/src/incremental/handlers/graphql17Alpha9.ts index 6dbe577a048..a38323d88c6 100644 --- a/src/incremental/handlers/graphql17Alpha9.ts +++ b/src/incremental/handlers/graphql17Alpha9.ts @@ -130,16 +130,10 @@ class IncrementalRequest const items = incremental.items as any[]; const parent: any[] = []; - if (!(pending.id in this.mergedIndexes)) { - const dataAtPath = pending.path.reduce( - (data, key) => (data as any)[key], - this.data - ); - - this.mergedIndexes[pending.id] = dataAtPath.length; - } - - for (let i = 0!; i < items.length; i++) { + // This creates a sparse array with values set at the indices streamed + // from the server. 
DeepMerger uses Object.keys and will correctly + // place the values in this array in the correct place + for (let i = 0; i < items.length; i++) { parent[i + this.mergedIndexes[pending.id]] = items[i]; } @@ -147,6 +141,28 @@ class IncrementalRequest data = parent; } else { data = incremental.data; + + // For deferred data, check if any pending streams have data here + // and update mergedIndexes accordingly + // Look through all pending items to see if any have arrays in this incremental data + for (const pendingItem of this.pending) { + if (!(pendingItem.id in this.mergedIndexes)) { + // Check if this incremental data contains array data for the pending path + // The pending path is absolute, but incremental data is relative to the defer + // E.g., pending.path = ["nestedObject"], pendingItem.path = ["nestedObject", "nestedFriendList"] + // incremental.data = { scalarField: "...", nestedFriendList: [...] } + // So we need the path from pending.path onwards + const relativePath = pendingItem.path.slice(pending.path.length); + const dataAtPath = relativePath.reduce( + (data, key) => (data as any)?.[key], + incremental.data + ); + + if (Array.isArray(dataAtPath)) { + this.mergedIndexes[pendingItem.id] = dataAtPath.length; + } + } + } } for (let i = path.length - 1; i >= 0; i--) { @@ -154,7 +170,7 @@ class IncrementalRequest const parent: Record = typeof key === "number" ? [] : {}; parent[key] = data; - data = parent as typeof data; + data = parent; } this.merge({ From b5581e1b54a277bf261736b97024f4eb5ff928b9 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 11 Sep 2025 16:08:08 -0600 Subject: [PATCH 183/254] Rename property --- src/incremental/handlers/graphql17Alpha9.ts | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/src/incremental/handlers/graphql17Alpha9.ts b/src/incremental/handlers/graphql17Alpha9.ts index a38323d88c6..33e943e4a52 100644 --- a/src/incremental/handlers/graphql17Alpha9.ts +++ b/src/incremental/handlers/graphql17Alpha9.ts @@ -86,7 +86,7 @@ class IncrementalRequest private errors: GraphQLFormattedError[] = []; private extensions: Record = {}; private pending: GraphQL17Alpha9Handler.PendingResult[] = []; - private mergedIndexes: Record = {}; + private streamPositions: Record = {}; handle( cacheData: TData | DeepPartial | null | undefined = this.data, @@ -106,7 +106,7 @@ class IncrementalRequest ); if (Array.isArray(dataAtPath)) { - this.mergedIndexes[pending.id] = dataAtPath.length; + this.streamPositions[pending.id] = dataAtPath.length; } } } @@ -134,10 +134,10 @@ class IncrementalRequest // from the server. 
DeepMerger uses Object.keys and will correctly // place the values in this array in the correct place for (let i = 0; i < items.length; i++) { - parent[i + this.mergedIndexes[pending.id]] = items[i]; + parent[i + this.streamPositions[pending.id]] = items[i]; } - this.mergedIndexes[pending.id] += items.length; + this.streamPositions[pending.id] += items.length; data = parent; } else { data = incremental.data; @@ -146,7 +146,7 @@ class IncrementalRequest // and update mergedIndexes accordingly // Look through all pending items to see if any have arrays in this incremental data for (const pendingItem of this.pending) { - if (!(pendingItem.id in this.mergedIndexes)) { + if (!(pendingItem.id in this.streamPositions)) { // Check if this incremental data contains array data for the pending path // The pending path is absolute, but incremental data is relative to the defer // E.g., pending.path = ["nestedObject"], pendingItem.path = ["nestedObject", "nestedFriendList"] @@ -159,7 +159,7 @@ class IncrementalRequest ); if (Array.isArray(dataAtPath)) { - this.mergedIndexes[pendingItem.id] = dataAtPath.length; + this.streamPositions[pendingItem.id] = dataAtPath.length; } } } From ae31314440a8a23d6469b8fc811ad6c16224adf3 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 11 Sep 2025 16:15:07 -0600 Subject: [PATCH 184/254] Add comment --- src/incremental/handlers/graphql17Alpha9.ts | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/src/incremental/handlers/graphql17Alpha9.ts b/src/incremental/handlers/graphql17Alpha9.ts index 33e943e4a52..e7fce195d4b 100644 --- a/src/incremental/handlers/graphql17Alpha9.ts +++ b/src/incremental/handlers/graphql17Alpha9.ts @@ -86,6 +86,13 @@ class IncrementalRequest private errors: GraphQLFormattedError[] = []; private extensions: Record = {}; private pending: GraphQL17Alpha9Handler.PendingResult[] = []; + // `streamPositions` maps `pending.id` to the index that should be set by the + // next `incremental` stream chunk to ensure the streamed array item is placed + // at the correct point in the data array. `this.data` contains cached + // references with the full array so we can't rely on the array length in + // `this.data` to determine where to place item. This also ensures that items + // updated by the cache between a streamed chunk aren't overwritten by merges + // of future stream items from already merged stream items. private streamPositions: Record = {}; handle( From e67eab2d283a5fe45b54c0d709e28cd9dd79a952 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 11 Sep 2025 16:27:23 -0600 Subject: [PATCH 185/254] Update comment --- src/incremental/handlers/graphql17Alpha9.ts | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/incremental/handlers/graphql17Alpha9.ts b/src/incremental/handlers/graphql17Alpha9.ts index e7fce195d4b..2355ba10b05 100644 --- a/src/incremental/handlers/graphql17Alpha9.ts +++ b/src/incremental/handlers/graphql17Alpha9.ts @@ -149,9 +149,10 @@ class IncrementalRequest } else { data = incremental.data; - // For deferred data, check if any pending streams have data here - // and update mergedIndexes accordingly - // Look through all pending items to see if any have arrays in this incremental data + // Check if any pending streams added arrays from deferred data so + // that we can update streamPositions with the initial length of the + // array to ensure future streamed items are inserted at the right + // starting index. 
for (const pendingItem of this.pending) { if (!(pendingItem.id in this.streamPositions)) { // Check if this incremental data contains array data for the pending path From 12a89fc3f4cd1e2d1f0ff5ee8014a85eebfdbcff Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 11 Sep 2025 16:36:45 -0600 Subject: [PATCH 186/254] it -> test --- .../handlers/__tests__/graphql17Alpha9/stream.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts b/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts index 6ad0b17c939..ac26a6505f0 100644 --- a/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts +++ b/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts @@ -2797,7 +2797,7 @@ test("properly merges streamed data into list with more items", async () => { } }); -it("properly merges cache data when list is included in deferred chunk", async () => { +test("properly merges cache data when list is included in deferred chunk", async () => { const { promise: slowFieldPromise, resolve: resolveSlowField } = promiseWithResolvers(); From d6198aef191bb6ae183f195f816c85f2b54f253f Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 11 Sep 2025 16:40:41 -0600 Subject: [PATCH 187/254] Use shared schema --- .../streamDefer20220824.test.ts | 69 ++----------------- .../streamGraphQL17Alpha9.test.ts | 69 ++----------------- .../__tests__/defer20220824/stream.test.ts | 69 ++----------------- .../__tests__/graphql17Alpha9/stream.test.ts | 65 +---------------- 4 files changed, 20 insertions(+), 252 deletions(-) diff --git a/src/core/__tests__/client.watchQuery/streamDefer20220824.test.ts b/src/core/__tests__/client.watchQuery/streamDefer20220824.test.ts index 5c842c3db2b..01ab8f1f78a 100644 --- a/src/core/__tests__/client.watchQuery/streamDefer20220824.test.ts +++ b/src/core/__tests__/client.watchQuery/streamDefer20220824.test.ts @@ -1,11 +1,3 @@ -import { - GraphQLID, - GraphQLList, - GraphQLNonNull, - GraphQLObjectType, - GraphQLSchema, - GraphQLString, -} from "graphql-17-alpha2"; import { from } from "rxjs"; import { @@ -19,76 +11,27 @@ import { import { Defer20220824Handler } from "@apollo/client/incremental"; import { executeSchemaGraphQL17Alpha2, + friendListSchemaGraphQL17Alpha2, markAsStreaming, mockDefer20220824, ObservableStream, promiseWithResolvers, } from "@apollo/client/testing/internal"; -const friendType = new GraphQLObjectType({ - fields: { - id: { type: GraphQLID }, - name: { type: GraphQLString }, - nonNullName: { type: new GraphQLNonNull(GraphQLString) }, - }, - name: "Friend", -}); - const friends = [ { name: "Luke", id: 1 }, { name: "Han", id: 2 }, { name: "Leia", id: 3 }, ]; -const query = new GraphQLObjectType({ - fields: { - scalarList: { - type: new GraphQLList(GraphQLString), - }, - scalarListList: { - type: new GraphQLList(new GraphQLList(GraphQLString)), - }, - friendList: { - type: new GraphQLList(friendType), - }, - nonNullFriendList: { - type: new GraphQLList(new GraphQLNonNull(friendType)), - }, - nestedObject: { - type: new GraphQLObjectType({ - name: "NestedObject", - fields: { - scalarField: { - type: GraphQLString, - }, - nonNullScalarField: { - type: new GraphQLNonNull(GraphQLString), - }, - nestedFriendList: { type: new GraphQLList(friendType) }, - deeperNestedObject: { - type: new GraphQLObjectType({ - name: "DeeperNestedObject", - fields: { - nonNullScalarField: { - type: new GraphQLNonNull(GraphQLString), - }, - deeperNestedFriendList: { type: new 
GraphQLList(friendType) }, - }, - }), - }, - }, - }), - }, - }, - name: "Query", -}); - -const schema = new GraphQLSchema({ query }); - function createLink(rootValue?: Record) { return new ApolloLink((operation) => { return from( - executeSchemaGraphQL17Alpha2(schema, operation.query, rootValue) + executeSchemaGraphQL17Alpha2( + friendListSchemaGraphQL17Alpha2, + operation.query, + rootValue + ) ); }); } diff --git a/src/core/__tests__/client.watchQuery/streamGraphQL17Alpha9.test.ts b/src/core/__tests__/client.watchQuery/streamGraphQL17Alpha9.test.ts index c41861e597e..bdcd108a54e 100644 --- a/src/core/__tests__/client.watchQuery/streamGraphQL17Alpha9.test.ts +++ b/src/core/__tests__/client.watchQuery/streamGraphQL17Alpha9.test.ts @@ -1,11 +1,3 @@ -import { - GraphQLID, - GraphQLList, - GraphQLNonNull, - GraphQLObjectType, - GraphQLSchema, - GraphQLString, -} from "graphql-17-alpha9"; import { from } from "rxjs"; import { @@ -19,76 +11,27 @@ import { import { GraphQL17Alpha9Handler } from "@apollo/client/incremental"; import { executeSchemaGraphQL17Alpha9, + friendListSchemaGraphQL17Alpha9, markAsStreaming, mockDeferStreamGraphQL17Alpha9, ObservableStream, promiseWithResolvers, } from "@apollo/client/testing/internal"; -const friendType = new GraphQLObjectType({ - fields: { - id: { type: GraphQLID }, - name: { type: GraphQLString }, - nonNullName: { type: new GraphQLNonNull(GraphQLString) }, - }, - name: "Friend", -}); - const friends = [ { name: "Luke", id: 1 }, { name: "Han", id: 2 }, { name: "Leia", id: 3 }, ]; -const query = new GraphQLObjectType({ - fields: { - scalarList: { - type: new GraphQLList(GraphQLString), - }, - scalarListList: { - type: new GraphQLList(new GraphQLList(GraphQLString)), - }, - friendList: { - type: new GraphQLList(friendType), - }, - nonNullFriendList: { - type: new GraphQLList(new GraphQLNonNull(friendType)), - }, - nestedObject: { - type: new GraphQLObjectType({ - name: "NestedObject", - fields: { - scalarField: { - type: GraphQLString, - }, - nonNullScalarField: { - type: new GraphQLNonNull(GraphQLString), - }, - nestedFriendList: { type: new GraphQLList(friendType) }, - deeperNestedObject: { - type: new GraphQLObjectType({ - name: "DeeperNestedObject", - fields: { - nonNullScalarField: { - type: new GraphQLNonNull(GraphQLString), - }, - deeperNestedFriendList: { type: new GraphQLList(friendType) }, - }, - }), - }, - }, - }), - }, - }, - name: "Query", -}); - -const schema = new GraphQLSchema({ query }); - function createLink(rootValue?: Record) { return new ApolloLink((operation) => { return from( - executeSchemaGraphQL17Alpha9(schema, operation.query, rootValue) + executeSchemaGraphQL17Alpha9( + friendListSchemaGraphQL17Alpha9, + operation.query, + rootValue + ) ); }); } diff --git a/src/incremental/handlers/__tests__/defer20220824/stream.test.ts b/src/incremental/handlers/__tests__/defer20220824/stream.test.ts index d560165c819..3dd497d67d1 100644 --- a/src/incremental/handlers/__tests__/defer20220824/stream.test.ts +++ b/src/incremental/handlers/__tests__/defer20220824/stream.test.ts @@ -1,13 +1,5 @@ import assert from "node:assert"; -import { - GraphQLID, - GraphQLList, - GraphQLNonNull, - GraphQLObjectType, - GraphQLSchema, - GraphQLString, -} from "graphql-17-alpha2"; import { from } from "rxjs"; import type { DocumentNode } from "@apollo/client"; @@ -21,6 +13,7 @@ import { import { Defer20220824Handler } from "@apollo/client/incremental"; import { executeSchemaGraphQL17Alpha2, + friendListSchemaGraphQL17Alpha2, markAsStreaming, ObservableStream, 
promiseWithResolvers, @@ -29,68 +22,18 @@ import { // This is the test setup of the `graphql-js` v17.0.0-alpha.2 release: // https://github.com/graphql/graphql-js/blob/042002c3d332d36c67861f5b37d39b74d54d97d4/src/execution/__tests__/stream-test.ts -const friendType = new GraphQLObjectType({ - fields: { - id: { type: GraphQLID }, - name: { type: GraphQLString }, - nonNullName: { type: new GraphQLNonNull(GraphQLString) }, - }, - name: "Friend", -}); - const friends = [ { name: "Luke", id: 1 }, { name: "Han", id: 2 }, { name: "Leia", id: 3 }, ]; -const query = new GraphQLObjectType({ - fields: { - scalarList: { - type: new GraphQLList(GraphQLString), - }, - scalarListList: { - type: new GraphQLList(new GraphQLList(GraphQLString)), - }, - friendList: { - type: new GraphQLList(friendType), - }, - nonNullFriendList: { - type: new GraphQLList(new GraphQLNonNull(friendType)), - }, - nestedObject: { - type: new GraphQLObjectType({ - name: "NestedObject", - fields: { - scalarField: { - type: GraphQLString, - }, - nonNullScalarField: { - type: new GraphQLNonNull(GraphQLString), - }, - nestedFriendList: { type: new GraphQLList(friendType) }, - deeperNestedObject: { - type: new GraphQLObjectType({ - name: "DeeperNestedObject", - fields: { - nonNullScalarField: { - type: new GraphQLNonNull(GraphQLString), - }, - deeperNestedFriendList: { type: new GraphQLList(friendType) }, - }, - }), - }, - }, - }), - }, - }, - name: "Query", -}); - -const schema = new GraphQLSchema({ query }); - function run(document: DocumentNode, rootValue: unknown = {}) { - return executeSchemaGraphQL17Alpha2(schema, document, rootValue); + return executeSchemaGraphQL17Alpha2( + friendListSchemaGraphQL17Alpha2, + document, + rootValue + ); } function createSchemaLink(rootValue?: Record) { diff --git a/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts b/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts index ac26a6505f0..7b00b258c9f 100644 --- a/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts +++ b/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts @@ -1,13 +1,5 @@ import assert from "node:assert"; -import { - GraphQLID, - GraphQLList, - GraphQLNonNull, - GraphQLObjectType, - GraphQLSchema, - GraphQLString, -} from "graphql-17-alpha9"; import { from } from "rxjs"; import type { DocumentNode } from "@apollo/client"; @@ -21,6 +13,7 @@ import { import { GraphQL17Alpha9Handler } from "@apollo/client/incremental"; import { executeSchemaGraphQL17Alpha9, + friendListSchemaGraphQL17Alpha9, markAsStreaming, ObservableStream, promiseWithResolvers, @@ -29,66 +22,12 @@ import { // This is the test setup of the `graphql-js` v17.0.0-alpha.9 release: // https://github.com/graphql/graphql-js/blob/3283f8adf52e77a47f148ff2f30185c8d11ff0f0/src/execution/__tests__/stream-test.ts -const friendType = new GraphQLObjectType({ - fields: { - id: { type: GraphQLID }, - name: { type: GraphQLString }, - nonNullName: { type: new GraphQLNonNull(GraphQLString) }, - }, - name: "Friend", -}); - const friends = [ { name: "Luke", id: 1 }, { name: "Han", id: 2 }, { name: "Leia", id: 3 }, ]; -const query = new GraphQLObjectType({ - fields: { - scalarList: { - type: new GraphQLList(GraphQLString), - }, - scalarListList: { - type: new GraphQLList(new GraphQLList(GraphQLString)), - }, - friendList: { - type: new GraphQLList(friendType), - }, - nonNullFriendList: { - type: new GraphQLList(new GraphQLNonNull(friendType)), - }, - nestedObject: { - type: new GraphQLObjectType({ - name: "NestedObject", - fields: { 
- scalarField: { - type: GraphQLString, - }, - nonNullScalarField: { - type: new GraphQLNonNull(GraphQLString), - }, - nestedFriendList: { type: new GraphQLList(friendType) }, - deeperNestedObject: { - type: new GraphQLObjectType({ - name: "DeeperNestedObject", - fields: { - nonNullScalarField: { - type: new GraphQLNonNull(GraphQLString), - }, - deeperNestedFriendList: { type: new GraphQLList(friendType) }, - }, - }), - }, - }, - }), - }, - }, - name: "Query", -}); - -const schema = new GraphQLSchema({ query }); - function resolveOnNextTick(): Promise { return Promise.resolve(undefined); } @@ -99,7 +38,7 @@ function run( enableEarlyExecution = false ) { return executeSchemaGraphQL17Alpha9( - schema, + friendListSchemaGraphQL17Alpha9, document, rootValue, enableEarlyExecution From 01cdf3eb5c849d024441fe4308243157a43dab12 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 11 Sep 2025 16:48:17 -0600 Subject: [PATCH 188/254] Add cache tests for defer20220824 stream --- .../__tests__/defer20220824/stream.test.ts | 437 ++++++++++++++++++ 1 file changed, 437 insertions(+) diff --git a/src/incremental/handlers/__tests__/defer20220824/stream.test.ts b/src/incremental/handlers/__tests__/defer20220824/stream.test.ts index 3dd497d67d1..13c09126dc9 100644 --- a/src/incremental/handlers/__tests__/defer20220824/stream.test.ts +++ b/src/incremental/handlers/__tests__/defer20220824/stream.test.ts @@ -1547,3 +1547,440 @@ test("Defer20220824Handler can be used with `ApolloClient`", async () => { partial: false, }); }); + +test("properly merges streamed data into cache data", async () => { + const query = gql` + query { + friendList @stream(initialCount: 2) { + name + id + } + } + `; + + const handler = new Defer20220824Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + friendList: () => friends.map((f) => Promise.resolve(f)), + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect( + request.handle( + { + friendList: [ + { name: "Luke Cached", id: "1" }, + { name: "Han Cached", id: "2" }, + { name: "Leia Cached", id: "3" }, + ], + }, + chunk + ) + ).toStrictEqualTyped({ + data: { + friendList: [ + { name: "Luke", id: "1" }, + { name: "Han", id: "2" }, + { name: "Leia Cached", id: "3" }, + ], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect( + request.handle( + { + friendList: [ + { name: "Luke", id: "1" }, + { name: "Han", id: "2" }, + { name: "Leia Cached", id: "3" }, + ], + }, + chunk + ) + ).toStrictEqualTyped({ + data: { + friendList: [ + { name: "Luke", id: "1" }, + { name: "Han", id: "2" }, + { name: "Leia", id: "3" }, + ], + }, + }); + expect(request.hasNext).toBe(false); + } +}); + +test("properly merges streamed data into partial cache data", async () => { + const query = gql` + query { + friendList @stream(initialCount: 2) { + name + id + } + } + `; + + const handler = new Defer20220824Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + friendList: () => friends.map((f) => Promise.resolve(f)), + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect( + request.handle( + { friendList: [{ id: "1" }, { id: "2" }, { id: "3" }] }, + chunk + ) + ).toStrictEqualTyped({ + data: { + friendList: [ + { name: 
"Luke", id: "1" }, + { name: "Han", id: "2" }, + { id: "3" }, + ], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect( + request.handle( + { + friendList: [ + { name: "Luke", id: "1" }, + { name: "Han", id: "2" }, + { id: "3" }, + ], + }, + chunk + ) + ).toStrictEqualTyped({ + data: { + friendList: [ + { name: "Luke", id: "1" }, + { name: "Han", id: "2" }, + { name: "Leia", id: "3" }, + ], + }, + }); + expect(request.hasNext).toBe(false); + } +}); + +test("properly merges streamed data into list with fewer items", async () => { + const query = gql` + query { + friendList @stream(initialCount: 2) { + name + id + } + } + `; + + const handler = new Defer20220824Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + friendList: () => friends.map((f) => Promise.resolve(f)), + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect( + request.handle({ friendList: [{ id: "1", name: "Luke Cached" }] }, chunk) + ).toStrictEqualTyped({ + data: { + friendList: [ + { name: "Luke", id: "1" }, + { name: "Han", id: "2" }, + ], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect( + request.handle( + { + friendList: [ + { name: "Luke", id: "1" }, + { name: "Han", id: "2" }, + ], + }, + chunk + ) + ).toStrictEqualTyped({ + data: { + friendList: [ + { name: "Luke", id: "1" }, + { name: "Han", id: "2" }, + { name: "Leia", id: "3" }, + ], + }, + }); + expect(request.hasNext).toBe(false); + } +}); + +test("properly merges streamed data into list with more items", async () => { + const query = gql` + query { + friendList @stream(initialCount: 2) { + name + id + } + } + `; + + const handler = new Defer20220824Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + friendList: () => friends.map((f) => Promise.resolve(f)), + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect( + request.handle( + { + friendList: [ + { name: "Luke Cached", id: "1" }, + { name: "Han Cached", id: "2" }, + { name: "Leia Cached", id: "3" }, + { name: "Chewbacca Cached", id: "4" }, + ], + }, + chunk + ) + ).toStrictEqualTyped({ + data: { + friendList: [ + { name: "Luke", id: "1" }, + { name: "Han", id: "2" }, + { name: "Leia Cached", id: "3" }, + { name: "Chewbacca Cached", id: "4" }, + ], + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect( + request.handle( + { + friendList: [ + { name: "Luke", id: "1" }, + { name: "Han", id: "2" }, + { name: "Leia Cached", id: "3" }, + { name: "Chewbacca Cached", id: "4" }, + ], + }, + chunk + ) + ).toStrictEqualTyped({ + data: { + friendList: [ + { name: "Luke", id: "1" }, + { name: "Han", id: "2" }, + { name: "Leia", id: "3" }, + { name: "Chewbacca Cached", id: "4" }, + ], + }, + }); + expect(request.hasNext).toBe(false); + } +}); + +test("properly merges cache data when list is included in deferred chunk", async () => { + const { promise: slowFieldPromise, resolve: resolveSlowField } = + promiseWithResolvers(); + + const query = gql` + query 
{ + nestedObject { + ...DeferFragment @defer + } + } + fragment DeferFragment on NestedObject { + scalarField + nestedFriendList @stream(initialCount: 0) { + name + } + } + `; + + const handler = new Defer20220824Handler(); + const request = handler.startRequest({ query }); + + const incoming = run(query, { + nestedObject: { + scalarField: () => slowFieldPromise, + async *nestedFriendList() { + yield await Promise.resolve(friends[0]); + yield await Promise.resolve(friends[1]); + }, + }, + }); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect( + request.handle( + { + nestedObject: { + scalarField: "cached", + nestedFriendList: [{ name: "Luke Cached" }, { name: "Han Cached" }], + }, + }, + chunk + ) + ).toStrictEqualTyped({ + data: { + nestedObject: { + scalarField: "cached", + nestedFriendList: [{ name: "Luke Cached" }, { name: "Han Cached" }], + }, + }, + }); + expect(request.hasNext).toBe(true); + } + + resolveSlowField("slow"); + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect( + request.handle( + { + nestedObject: { + scalarField: "cached", + nestedFriendList: [{ name: "Luke Cached" }, { name: "Han Cached" }], + }, + }, + chunk + ) + ).toStrictEqualTyped({ + data: { + nestedObject: { + scalarField: "slow", + nestedFriendList: [{ name: "Luke Cached" }, { name: "Han Cached" }], + }, + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect(request.handle(undefined, chunk)).toStrictEqualTyped({ + data: { + nestedObject: { + scalarField: "slow", + nestedFriendList: [{ name: "Luke" }, { name: "Han Cached" }], + }, + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect( + request.handle( + { + nestedObject: { + scalarField: "slow", + nestedFriendList: [{ name: "Luke" }, { name: "Han Cached" }], + }, + }, + chunk + ) + ).toStrictEqualTyped({ + data: { + nestedObject: { + scalarField: "slow", + nestedFriendList: [{ name: "Luke" }, { name: "Han" }], + }, + }, + }); + expect(request.hasNext).toBe(true); + } + + { + const { value: chunk, done } = await incoming.next(); + + assert(!done); + assert(handler.isIncrementalResult(chunk)); + expect( + request.handle( + { + nestedObject: { + scalarField: "slow", + nestedFriendList: [{ name: "Luke" }, { name: "Han" }], + }, + }, + chunk + ) + ).toStrictEqualTyped({ + data: { + nestedObject: { + scalarField: "slow", + nestedFriendList: [{ name: "Luke" }, { name: "Han" }], + }, + }, + }); + expect(request.hasNext).toBe(false); + } +}); From 1635680f78427a4e50c0ed40c9c61dce17a567cd Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 11 Sep 2025 16:55:21 -0600 Subject: [PATCH 189/254] Update stream tests to better handle React 18/19 differences --- .../streamGraphQL17Alpha9.test.tsx | 73 +++++++++++++++++++ 1 file changed, 73 insertions(+) diff --git a/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx b/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx index cba3e80d7fa..8eafae1220b 100644 --- a/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx +++ b/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx @@ -1340,6 +1340,21 @@ test("adds 
partial data and does not throw errors returned in incremental chunks } subject.next(friends[0]); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + subject.next(Promise.reject(new Error("Could not get friend"))); { @@ -1362,6 +1377,34 @@ test("adds partial data and does not throw errors returned in incremental chunks } subject.next(friends[2]); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + null, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: new CombinedGraphQLErrors({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + null, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + errors: [{ message: "Could not get friend", path: ["friendList", 1] }], + }), + }); + } + subject.complete(); { @@ -1426,6 +1469,21 @@ test("adds partial data and discards errors returned in incremental chunks with } subject.next(friends[0]); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + subject.next(Promise.reject(new Error("Could not get friend"))); { @@ -1503,6 +1561,21 @@ test("can refetch and respond to cache updates after encountering an error in an } subject.next(friends[0]); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + subject.next(Promise.reject(new Error("Could not get friend"))); { From e723622fbd664e1b40998279de97841a54e473b1 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 11 Sep 2025 16:57:14 -0600 Subject: [PATCH 190/254] Simplify merge function --- src/incremental/handlers/defer20220824.ts | 22 ++++++++-------------- 1 file changed, 8 insertions(+), 14 deletions(-) diff --git a/src/incremental/handlers/defer20220824.ts b/src/incremental/handlers/defer20220824.ts index bfa9a68d73b..27ce3d3c96d 100644 --- a/src/incremental/handlers/defer20220824.ts +++ b/src/incremental/handlers/defer20220824.ts @@ -75,12 +75,9 @@ class DeferRequest> private extensions: Record = {}; private data: any = {}; - private merge( - normalized: FormattedExecutionResult, - merger: DeepMerger - ) { + private merge(normalized: FormattedExecutionResult) { if (normalized.data !== undefined) { - this.data = merger.merge(this.data, normalized.data); + this.data = new DeepMerger().merge(this.data, normalized.data); } if (normalized.errors) { this.errors.push(...normalized.errors); @@ -96,7 +93,7 @@ class DeferRequest> 
): FormattedExecutionResult { this.hasNext = chunk.hasNext; this.data = cacheData; - this.merge(chunk, new DeepMerger()); + this.merge(chunk); if (hasIncrementalChunks(chunk)) { for (const incremental of chunk.incremental) { @@ -119,14 +116,11 @@ class DeferRequest> data = parent as typeof data; } } - this.merge( - { - errors, - extensions, - data: data ? (data as TData) : undefined, - }, - new DeepMerger() - ); + this.merge({ + errors, + extensions, + data: data ? (data as TData) : undefined, + }); } } From 447dd7d1e87c56ae6d3ca1167128c743b08fc750 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 11 Sep 2025 17:07:53 -0600 Subject: [PATCH 191/254] Add missing args for friendList in alpha2 schema --- src/testing/internal/schemas/friendList.graphql17Alpha2.ts | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/testing/internal/schemas/friendList.graphql17Alpha2.ts b/src/testing/internal/schemas/friendList.graphql17Alpha2.ts index 07ab96da399..17d59da59a4 100644 --- a/src/testing/internal/schemas/friendList.graphql17Alpha2.ts +++ b/src/testing/internal/schemas/friendList.graphql17Alpha2.ts @@ -1,5 +1,6 @@ import { GraphQLID, + GraphQLInt, GraphQLList, GraphQLNonNull, GraphQLObjectType, @@ -26,6 +27,11 @@ const query = new GraphQLObjectType({ }, friendList: { type: new GraphQLList(friendType), + args: { + offset: { + type: GraphQLInt, + }, + }, }, nonNullFriendList: { type: new GraphQLList(new GraphQLNonNull(friendType)), From 2ab4a55127dcd4aad5174fdc25699c11dcee9f62 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 11 Sep 2025 17:08:06 -0600 Subject: [PATCH 192/254] Update useSuspenseQuery tests to be more friendly between react versions --- .../streamDefer20220824.test.tsx | 45 +++++++++++++++++++ 1 file changed, 45 insertions(+) diff --git a/src/react/hooks/__tests__/useSuspenseQuery/streamDefer20220824.test.tsx b/src/react/hooks/__tests__/useSuspenseQuery/streamDefer20220824.test.tsx index 67c3f15d54c..0d2d441d4c5 100644 --- a/src/react/hooks/__tests__/useSuspenseQuery/streamDefer20220824.test.tsx +++ b/src/react/hooks/__tests__/useSuspenseQuery/streamDefer20220824.test.tsx @@ -1340,6 +1340,21 @@ test("adds partial data and does not throw errors returned in incremental chunks } subject.next(friends[0]); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + subject.next(new Error("Could not get friend")); { @@ -1426,6 +1441,21 @@ test("adds partial data and discards errors returned in incremental chunks with } subject.next(friends[0]); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + subject.next(new Error("Could not get friend")); { @@ -1503,6 +1533,21 @@ test("can refetch and respond to cache updates after encountering an error in an } subject.next(friends[0]); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + 
data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + subject.next(new Error("Could not get friend")); { From c954d2531deb1b203993d124605fdeab9829826b Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 11 Sep 2025 21:56:57 -0600 Subject: [PATCH 193/254] Add useQuery stream tests for GraphQL17Alpha9Handler --- .../useQuery/streamGraphQL17Alpha9.test.tsx | 783 ++++++++++++++++++ 1 file changed, 783 insertions(+) create mode 100644 src/react/hooks/__tests__/useQuery/streamGraphQL17Alpha9.test.tsx diff --git a/src/react/hooks/__tests__/useQuery/streamGraphQL17Alpha9.test.tsx b/src/react/hooks/__tests__/useQuery/streamGraphQL17Alpha9.test.tsx new file mode 100644 index 00000000000..ab0e022c798 --- /dev/null +++ b/src/react/hooks/__tests__/useQuery/streamGraphQL17Alpha9.test.tsx @@ -0,0 +1,783 @@ +import { + disableActEnvironment, + renderHookToSnapshotStream, +} from "@testing-library/react-render-stream"; +import { from } from "rxjs"; + +import { + ApolloClient, + ApolloLink, + CombinedGraphQLErrors, + gql, + InMemoryCache, + NetworkStatus, +} from "@apollo/client"; +import { GraphQL17Alpha9Handler } from "@apollo/client/incremental"; +import { useQuery } from "@apollo/client/react"; +import { + asyncIterableSubject, + createClientWrapper, + executeSchemaGraphQL17Alpha9, + friendListSchemaGraphQL17Alpha9, + markAsStreaming, + spyOnConsole, +} from "@apollo/client/testing/internal"; + +function createLink(rootValue?: unknown) { + return new ApolloLink((operation) => { + return from( + executeSchemaGraphQL17Alpha9( + friendListSchemaGraphQL17Alpha9, + operation.query, + rootValue + ) + ); + }); +} + +const friends = [ + { name: "Luke", id: 1 }, + { name: "Han", id: 2 }, + { name: "Leia", id: 3 }, +]; + +type Friend = (typeof friends)[number]; + +test("should handle streamed queries", async () => { + const { stream, subject } = asyncIterableSubject(); + + const query = gql` + query { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + const client = new ApolloClient({ + link: createLink({ friendList: () => stream }), + cache: new InMemoryCache(), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot } = await renderHookToSnapshotStream( + () => useQuery(query), + { wrapper: createClientWrapper(client) } + ); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + previousData: undefined, + variables: {}, + }); + + subject.next(friends[0]); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + previousData: undefined, + variables: {}, + }); + + subject.next(friends[1]); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + ], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + previousData: { + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }, + variables: {}, + }); + + subject.next(friends[2]); + subject.complete(); + + await 
expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + previousData: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + ], + }, + variables: {}, + }); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.ready, + previousData: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + ], + }, + variables: {}, + }); + + await expect(takeSnapshot).not.toRerender(); +}); + +test("should handle streamed queries with fetch policy no-cache", async () => { + const { subject, stream } = asyncIterableSubject(); + const query = gql` + query { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + const client = new ApolloClient({ + link: createLink({ friendList: () => stream }), + cache: new InMemoryCache(), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot } = await renderHookToSnapshotStream( + () => useQuery(query, { fetchPolicy: "no-cache" }), + { wrapper: createClientWrapper(client) } + ); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + previousData: undefined, + variables: {}, + }); + + subject.next(friends[0]); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + previousData: undefined, + variables: {}, + }); + + subject.next(friends[1]); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + ], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + previousData: { + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }, + variables: {}, + }); + + subject.next(friends[2]); + subject.complete(); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + previousData: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + ], + }, + variables: {}, + }); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + dataState: "complete", + loading: false, + networkStatus: 
NetworkStatus.ready, + previousData: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + ], + }, + variables: {}, + }); + + await expect(takeSnapshot).not.toRerender(); +}); + +test("should handle streamed queries with errors returned on the incremental batched result", async () => { + const { stream, subject } = asyncIterableSubject(); + const query = gql` + query { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + const client = new ApolloClient({ + link: createLink({ friendList: () => stream }), + cache: new InMemoryCache(), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot } = await renderHookToSnapshotStream( + () => useQuery(query), + { wrapper: createClientWrapper(client) } + ); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + previousData: undefined, + variables: {}, + }); + + subject.next(friends[0]); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + previousData: undefined, + variables: {}, + }); + + subject.next(new Error("Could not load friend")); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: { + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }, + dataState: "complete", + error: new CombinedGraphQLErrors({ + data: { + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }, null], + }, + errors: [ + { + message: "Could not load friend", + path: ["friendList", 1], + }, + ], + }), + loading: false, + networkStatus: NetworkStatus.error, + previousData: undefined, + variables: {}, + }); + + // Emit these to show that errorPolicy of none cuts off future updates + subject.next(friends[2]); + subject.complete(); + + await expect(takeSnapshot).not.toRerender(); +}); + +test('should handle streamed queries with errors returned on the incremental batched result and errorPolicy "all"', async () => { + const { stream, subject } = asyncIterableSubject(); + const query = gql` + query { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + const client = new ApolloClient({ + link: createLink({ friendList: () => stream }), + cache: new InMemoryCache(), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot } = await renderHookToSnapshotStream( + () => useQuery(query, { errorPolicy: "all" }), + { wrapper: createClientWrapper(client) } + ); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + previousData: undefined, + variables: {}, + }); + + subject.next(friends[0]); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + previousData: undefined, + variables: {}, + }); + + subject.next(new Error("Could not load friend")); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }, null], + }), + 
dataState: "streaming", + error: new CombinedGraphQLErrors({ + data: { + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }, null], + }, + errors: [ + { + message: "Could not load friend", + path: ["friendList", 1], + }, + ], + }), + loading: true, + networkStatus: NetworkStatus.streaming, + previousData: { + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }, + variables: {}, + }); + + subject.next(friends[2]); + subject.complete(); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + null, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }), + dataState: "streaming", + error: new CombinedGraphQLErrors({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + null, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + errors: [ + { + message: "Could not load friend", + path: ["friendList", 1], + }, + ], + }), + loading: true, + networkStatus: NetworkStatus.streaming, + previousData: { + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }, null], + }, + variables: {}, + }); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + null, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + dataState: "complete", + error: new CombinedGraphQLErrors({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + null, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + errors: [ + { + message: "Could not load friend", + path: ["friendList", 1], + }, + ], + }), + loading: false, + networkStatus: NetworkStatus.error, + previousData: { + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }, null], + }, + variables: {}, + }); + + await expect(takeSnapshot).not.toRerender(); +}); + +test('returns eventually consistent data from streamed queries with data in the cache while using a "cache-and-network" fetch policy', async () => { + const { subject, stream } = asyncIterableSubject(); + const query = gql` + query { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: createLink({ friendList: () => stream }), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + client.writeQuery({ + query, + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Cached Luke" }, + { __typename: "Friend", id: "2", name: "Cached Han" }, + { __typename: "Friend", id: "3", name: "Cached Leia" }, + ], + }, + }); + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot } = await renderHookToSnapshotStream( + () => useQuery(query, { fetchPolicy: "cache-and-network" }), + { wrapper: createClientWrapper(client) } + ); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Cached Luke" }, + { __typename: "Friend", id: "2", name: "Cached Han" }, + { __typename: "Friend", id: "3", name: "Cached Leia" }, + ], + }, + dataState: "complete", + loading: true, + networkStatus: NetworkStatus.loading, + previousData: undefined, + variables: {}, + }); + + subject.next(friends[0]); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Cached Han" }, + { __typename: "Friend", id: "3", name: "Cached 
Leia" }, + ], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + previousData: { + friendList: [ + { __typename: "Friend", id: "1", name: "Cached Luke" }, + { __typename: "Friend", id: "2", name: "Cached Han" }, + { __typename: "Friend", id: "3", name: "Cached Leia" }, + ], + }, + variables: {}, + }); + + subject.next(friends[1]); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Cached Leia" }, + ], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + previousData: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Cached Han" }, + { __typename: "Friend", id: "3", name: "Cached Leia" }, + ], + }, + variables: {}, + }); + + subject.next(friends[2]); + subject.complete(); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + previousData: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Cached Leia" }, + ], + }, + variables: {}, + }); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }), + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.ready, + previousData: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Cached Leia" }, + ], + }, + variables: {}, + }); + + await expect(takeSnapshot).not.toRerender(); +}); + +// TODO: Determine how we handle partial data with streamed responses. While this +// works as expected and renders correctly, this also emits missing field +// warnings in the console when writing the result to the cache since array items +// with partial cache data are still included for items that haven't streamed in +// yet. +test('returns eventually consistent data from streamed queries with partial data in the cache and using a "cache-first" fetch policy with `returnPartialData`', async () => { + using _TODO_REMOVE_ME_AFTER_DECIDING_COMMENT = spyOnConsole("error"); + const { stream, subject } = asyncIterableSubject(); + const query = gql` + query { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: createLink({ friendList: () => stream }), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + // We know we are writing partial data to the cache so suppress the console + // warning. 
+ { + // using _consoleSpy = spyOnConsole("error"); + client.writeQuery({ + query, + data: { + friendList: [ + { __typename: "Friend", id: "1" }, + { __typename: "Friend", id: "2" }, + { __typename: "Friend", id: "3" }, + ], + }, + }); + } + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot } = await renderHookToSnapshotStream( + () => + useQuery(query, { + fetchPolicy: "cache-first", + returnPartialData: true, + }), + { + wrapper: createClientWrapper(client), + } + ); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1" }, + { __typename: "Friend", id: "2" }, + { __typename: "Friend", id: "3" }, + ], + }, + dataState: "partial", + loading: true, + networkStatus: NetworkStatus.loading, + previousData: undefined, + variables: {}, + }); + + subject.next(friends[0]); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2" }, + { __typename: "Friend", id: "3" }, + ], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + previousData: { + friendList: [ + { __typename: "Friend", id: "1" }, + { __typename: "Friend", id: "2" }, + { __typename: "Friend", id: "3" }, + ], + }, + variables: {}, + }); + + subject.next(friends[1]); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3" }, + ], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + previousData: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2" }, + { __typename: "Friend", id: "3" }, + ], + }, + variables: {}, + }); + + subject.next(friends[2]); + subject.complete(); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + previousData: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3" }, + ], + }, + variables: {}, + }); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.ready, + previousData: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3" }, + ], + }, + variables: {}, + }); + + await expect(takeSnapshot).not.toRerender(); +}); From 1206a77169ba7b2a1c16c92959769edf0772d440 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Thu, 11 Sep 2025 21:58:31 -0600 Subject: [PATCH 194/254] Add useQuery stream tests for Defer20220824Handler --- .../useQuery/streamDefer20220824.test.tsx | 783 ++++++++++++++++++ 1 file changed, 783 insertions(+) create mode 100644 src/react/hooks/__tests__/useQuery/streamDefer20220824.test.tsx diff --git 
a/src/react/hooks/__tests__/useQuery/streamDefer20220824.test.tsx b/src/react/hooks/__tests__/useQuery/streamDefer20220824.test.tsx new file mode 100644 index 00000000000..6432ce1d093 --- /dev/null +++ b/src/react/hooks/__tests__/useQuery/streamDefer20220824.test.tsx @@ -0,0 +1,783 @@ +import { + disableActEnvironment, + renderHookToSnapshotStream, +} from "@testing-library/react-render-stream"; +import { from } from "rxjs"; + +import { + ApolloClient, + ApolloLink, + CombinedGraphQLErrors, + gql, + InMemoryCache, + NetworkStatus, +} from "@apollo/client"; +import { Defer20220824Handler } from "@apollo/client/incremental"; +import { useQuery } from "@apollo/client/react"; +import { + asyncIterableSubject, + createClientWrapper, + executeSchemaGraphQL17Alpha2, + friendListSchemaGraphQL17Alpha2, + markAsStreaming, + spyOnConsole, +} from "@apollo/client/testing/internal"; + +function createLink(rootValue?: unknown) { + return new ApolloLink((operation) => { + return from( + executeSchemaGraphQL17Alpha2( + friendListSchemaGraphQL17Alpha2, + operation.query, + rootValue + ) + ); + }); +} + +const friends = [ + { name: "Luke", id: 1 }, + { name: "Han", id: 2 }, + { name: "Leia", id: 3 }, +]; + +type Friend = (typeof friends)[number]; + +test("should handle streamed queries", async () => { + const { stream, subject } = asyncIterableSubject(); + + const query = gql` + query { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + const client = new ApolloClient({ + link: createLink({ friendList: () => stream }), + cache: new InMemoryCache(), + incrementalHandler: new Defer20220824Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot } = await renderHookToSnapshotStream( + () => useQuery(query), + { wrapper: createClientWrapper(client) } + ); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + previousData: undefined, + variables: {}, + }); + + subject.next(friends[0]); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + previousData: undefined, + variables: {}, + }); + + subject.next(friends[1]); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + ], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + previousData: { + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }, + variables: {}, + }); + + subject.next(friends[2]); + subject.complete(); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + previousData: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + ], + }, + variables: {}, + }); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" 
}, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.ready, + previousData: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + ], + }, + variables: {}, + }); + + await expect(takeSnapshot).not.toRerender(); +}); + +test("should handle streamed queries with fetch policy no-cache", async () => { + const { subject, stream } = asyncIterableSubject(); + const query = gql` + query { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + const client = new ApolloClient({ + link: createLink({ friendList: () => stream }), + cache: new InMemoryCache(), + incrementalHandler: new Defer20220824Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot } = await renderHookToSnapshotStream( + () => useQuery(query, { fetchPolicy: "no-cache" }), + { wrapper: createClientWrapper(client) } + ); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + previousData: undefined, + variables: {}, + }); + + subject.next(friends[0]); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + previousData: undefined, + variables: {}, + }); + + subject.next(friends[1]); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + ], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + previousData: { + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }, + variables: {}, + }); + + subject.next(friends[2]); + subject.complete(); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + previousData: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + ], + }, + variables: {}, + }); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.ready, + previousData: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + ], + }, + variables: {}, + }); + + await expect(takeSnapshot).not.toRerender(); +}); + +test("should handle streamed queries with errors returned on the incremental batched result", async () => { + const { stream, subject } = asyncIterableSubject(); + const query = gql` + query { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + const client = new ApolloClient({ + link: createLink({ friendList: () => stream }), + cache: new InMemoryCache(), + incrementalHandler: new Defer20220824Handler(), + }); + + using _disabledAct = 
disableActEnvironment(); + const { takeSnapshot } = await renderHookToSnapshotStream( + () => useQuery(query), + { wrapper: createClientWrapper(client) } + ); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + previousData: undefined, + variables: {}, + }); + + subject.next(friends[0]); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + previousData: undefined, + variables: {}, + }); + + subject.next(new Error("Could not load friend")); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: { + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }, + dataState: "complete", + error: new CombinedGraphQLErrors({ + data: { + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }, null], + }, + errors: [ + { + message: "Could not load friend", + path: ["friendList", 1], + }, + ], + }), + loading: false, + networkStatus: NetworkStatus.error, + previousData: undefined, + variables: {}, + }); + + // Emit these to show that errorPolicy of none cuts off future updates + subject.next(friends[2]); + subject.complete(); + + await expect(takeSnapshot).not.toRerender(); +}); + +test('should handle streamed queries with errors returned on the incremental batched result and errorPolicy "all"', async () => { + const { stream, subject } = asyncIterableSubject(); + const query = gql` + query { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + const client = new ApolloClient({ + link: createLink({ friendList: () => stream }), + cache: new InMemoryCache(), + incrementalHandler: new Defer20220824Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot } = await renderHookToSnapshotStream( + () => useQuery(query, { errorPolicy: "all" }), + { wrapper: createClientWrapper(client) } + ); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + previousData: undefined, + variables: {}, + }); + + subject.next(friends[0]); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + previousData: undefined, + variables: {}, + }); + + subject.next(new Error("Could not load friend")); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }, null], + }), + dataState: "streaming", + error: new CombinedGraphQLErrors({ + data: { + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }, null], + }, + errors: [ + { + message: "Could not load friend", + path: ["friendList", 1], + }, + ], + }), + loading: true, + networkStatus: NetworkStatus.streaming, + previousData: { + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }, + variables: {}, + }); + + subject.next(friends[2]); + subject.complete(); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + null, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }), + dataState: 
"streaming", + error: new CombinedGraphQLErrors({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + null, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + errors: [ + { + message: "Could not load friend", + path: ["friendList", 1], + }, + ], + }), + loading: true, + networkStatus: NetworkStatus.streaming, + previousData: { + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }, null], + }, + variables: {}, + }); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + null, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + dataState: "complete", + error: new CombinedGraphQLErrors({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + null, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + errors: [ + { + message: "Could not load friend", + path: ["friendList", 1], + }, + ], + }), + loading: false, + networkStatus: NetworkStatus.error, + previousData: { + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }, null], + }, + variables: {}, + }); + + await expect(takeSnapshot).not.toRerender(); +}); + +test('returns eventually consistent data from streamed queries with data in the cache while using a "cache-and-network" fetch policy', async () => { + const { subject, stream } = asyncIterableSubject(); + const query = gql` + query { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: createLink({ friendList: () => stream }), + incrementalHandler: new Defer20220824Handler(), + }); + + client.writeQuery({ + query, + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Cached Luke" }, + { __typename: "Friend", id: "2", name: "Cached Han" }, + { __typename: "Friend", id: "3", name: "Cached Leia" }, + ], + }, + }); + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot } = await renderHookToSnapshotStream( + () => useQuery(query, { fetchPolicy: "cache-and-network" }), + { wrapper: createClientWrapper(client) } + ); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Cached Luke" }, + { __typename: "Friend", id: "2", name: "Cached Han" }, + { __typename: "Friend", id: "3", name: "Cached Leia" }, + ], + }, + dataState: "complete", + loading: true, + networkStatus: NetworkStatus.loading, + previousData: undefined, + variables: {}, + }); + + subject.next(friends[0]); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Cached Han" }, + { __typename: "Friend", id: "3", name: "Cached Leia" }, + ], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + previousData: { + friendList: [ + { __typename: "Friend", id: "1", name: "Cached Luke" }, + { __typename: "Friend", id: "2", name: "Cached Han" }, + { __typename: "Friend", id: "3", name: "Cached Leia" }, + ], + }, + variables: {}, + }); + + subject.next(friends[1]); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Cached Leia" }, + ], + }), + dataState: "streaming", + loading: true, + 
networkStatus: NetworkStatus.streaming, + previousData: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Cached Han" }, + { __typename: "Friend", id: "3", name: "Cached Leia" }, + ], + }, + variables: {}, + }); + + subject.next(friends[2]); + subject.complete(); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + previousData: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Cached Leia" }, + ], + }, + variables: {}, + }); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }), + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.ready, + previousData: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Cached Leia" }, + ], + }, + variables: {}, + }); + + await expect(takeSnapshot).not.toRerender(); +}); + +// TODO: Determine how we handle partial data with streamed responses. While this +// works as expected and renders correctly, this also emits missing field +// warnings in the console when writing the result to the cache since array items +// with partial cache data are still included for items that haven't streamed in +// yet. +test('returns eventually consistent data from streamed queries with partial data in the cache and using a "cache-first" fetch policy with `returnPartialData`', async () => { + using _TODO_REMOVE_ME_AFTER_DECIDING_COMMENT = spyOnConsole("error"); + const { stream, subject } = asyncIterableSubject(); + const query = gql` + query { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: createLink({ friendList: () => stream }), + incrementalHandler: new Defer20220824Handler(), + }); + + // We know we are writing partial data to the cache so suppress the console + // warning. 
+ { + // using _consoleSpy = spyOnConsole("error"); + client.writeQuery({ + query, + data: { + friendList: [ + { __typename: "Friend", id: "1" }, + { __typename: "Friend", id: "2" }, + { __typename: "Friend", id: "3" }, + ], + }, + }); + } + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot } = await renderHookToSnapshotStream( + () => + useQuery(query, { + fetchPolicy: "cache-first", + returnPartialData: true, + }), + { + wrapper: createClientWrapper(client), + } + ); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1" }, + { __typename: "Friend", id: "2" }, + { __typename: "Friend", id: "3" }, + ], + }, + dataState: "partial", + loading: true, + networkStatus: NetworkStatus.loading, + previousData: undefined, + variables: {}, + }); + + subject.next(friends[0]); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2" }, + { __typename: "Friend", id: "3" }, + ], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + previousData: { + friendList: [ + { __typename: "Friend", id: "1" }, + { __typename: "Friend", id: "2" }, + { __typename: "Friend", id: "3" }, + ], + }, + variables: {}, + }); + + subject.next(friends[1]); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3" }, + ], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + previousData: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2" }, + { __typename: "Friend", id: "3" }, + ], + }, + variables: {}, + }); + + subject.next(friends[2]); + subject.complete(); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + previousData: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3" }, + ], + }, + variables: {}, + }); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.ready, + previousData: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3" }, + ], + }, + variables: {}, + }); + + await expect(takeSnapshot).not.toRerender(); +}); From b05df031912f77546d84b23a5ebebd2bee2ecae1 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Fri, 12 Sep 2025 11:38:11 -0600 Subject: [PATCH 195/254] Add stream tests for useBackgroundQuery --- .../streamDefer20220824.test.tsx | 414 ++++++++++++++++++ .../streamGraphQL17Alpha9.test.tsx | 414 ++++++++++++++++++ 2 files changed, 828 insertions(+) create mode 100644 
src/react/hooks/__tests__/useBackgroundQuery/streamDefer20220824.test.tsx create mode 100644 src/react/hooks/__tests__/useBackgroundQuery/streamGraphQL17Alpha9.test.tsx diff --git a/src/react/hooks/__tests__/useBackgroundQuery/streamDefer20220824.test.tsx b/src/react/hooks/__tests__/useBackgroundQuery/streamDefer20220824.test.tsx new file mode 100644 index 00000000000..b34b85aa6c6 --- /dev/null +++ b/src/react/hooks/__tests__/useBackgroundQuery/streamDefer20220824.test.tsx @@ -0,0 +1,414 @@ +import type { RenderOptions } from "@testing-library/react"; +import { + createRenderStream, + disableActEnvironment, + useTrackRenders, +} from "@testing-library/react-render-stream"; +import React, { Suspense } from "react"; +import { ErrorBoundary } from "react-error-boundary"; +import { from } from "rxjs"; + +import type { + DataState, + ErrorLike, + OperationVariables, + TypedDocumentNode, +} from "@apollo/client"; +import { ApolloClient, ApolloLink, gql, NetworkStatus } from "@apollo/client"; +import { InMemoryCache } from "@apollo/client/cache"; +import { Defer20220824Handler } from "@apollo/client/incremental"; +import type { QueryRef } from "@apollo/client/react"; +import { useBackgroundQuery, useReadQuery } from "@apollo/client/react"; +import { + asyncIterableSubject, + createClientWrapper, + executeSchemaGraphQL17Alpha2, + friendListSchemaGraphQL17Alpha2, + markAsStreaming, + spyOnConsole, +} from "@apollo/client/testing/internal"; + +async function renderSuspenseHook< + TData, + TVariables extends OperationVariables, + TQueryRef extends QueryRef, + TStates extends DataState["dataState"] = TQueryRef extends ( + QueryRef + ) ? + States + : never, + Props = never, +>( + renderHook: ( + props: Props extends never ? undefined : Props + ) => [TQueryRef, useBackgroundQuery.Result], + options: Pick & { initialProps?: Props } +) { + function UseReadQuery({ queryRef }: { queryRef: QueryRef }) { + useTrackRenders({ name: "useReadQuery" }); + replaceSnapshot(useReadQuery(queryRef) as any); + + return null; + } + + function SuspenseFallback() { + useTrackRenders({ name: "SuspenseFallback" }); + + return null; + } + + function ErrorFallback() { + useTrackRenders({ name: "ErrorBoundary" }); + + return null; + } + + function App({ props }: { props: Props | undefined }) { + useTrackRenders({ name: "useBackgroundQuery" }); + const [queryRef] = renderHook(props as any); + + return ( + }> + replaceSnapshot({ error })} + > + + + + ); + } + + const { render, takeRender, replaceSnapshot } = createRenderStream< + useReadQuery.Result | { error: ErrorLike } + >(); + + const utils = await render(, options); + + function rerender(props: Props) { + return utils.rerender(); + } + + return { takeRender, rerender }; +} + +function createLink(rootValue?: unknown) { + return new ApolloLink((operation) => { + return from( + executeSchemaGraphQL17Alpha2( + friendListSchemaGraphQL17Alpha2, + operation.query, + rootValue + ) + ); + }); +} + +const friends = [ + { name: "Luke", id: 1 }, + { name: "Han", id: 2 }, + { name: "Leia", id: 3 }, +]; + +test('does not suspend deferred queries with data in the cache and using a "cache-and-network" fetch policy', async () => { + const { stream, subject } = asyncIterableSubject(); + interface Data { + friendList: Array<{ __typename: "Friend"; id: string; name: string }>; + } + + const query: TypedDocumentNode = gql` + query { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + const cache = new InMemoryCache(); + cache.writeQuery({ + query, + data: { + friendList: [ + { 
__typename: "Friend", id: "1", name: "Cached Luke" }, + { __typename: "Friend", id: "2", name: "Cached Han" }, + { __typename: "Friend", id: "3", name: "Cached Leia" }, + ], + }, + }); + const client = new ApolloClient({ + cache, + link: createLink({ friendList: () => stream }), + incrementalHandler: new Defer20220824Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + () => useBackgroundQuery(query, { fetchPolicy: "cache-and-network" }), + { wrapper: createClientWrapper(client) } + ); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual([ + "useBackgroundQuery", + "useReadQuery", + ]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Cached Luke" }, + { __typename: "Friend", id: "2", name: "Cached Han" }, + { __typename: "Friend", id: "3", name: "Cached Leia" }, + ], + }, + dataState: "complete", + error: undefined, + networkStatus: NetworkStatus.loading, + }); + } + + subject.next(friends[0]); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useReadQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Cached Han" }, + { __typename: "Friend", id: "3", name: "Cached Leia" }, + ], + }, + dataState: "streaming", + error: undefined, + networkStatus: NetworkStatus.streaming, + }); + } + + subject.next(friends[1]); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useReadQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Cached Leia" }, + ], + }, + dataState: "streaming", + error: undefined, + networkStatus: NetworkStatus.streaming, + }); + } + + subject.next(friends[2]); + subject.complete(); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useReadQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }), + dataState: "streaming", + error: undefined, + networkStatus: NetworkStatus.streaming, + }); + } + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useReadQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + dataState: "complete", + error: undefined, + networkStatus: NetworkStatus.ready, + }); + } + + await expect(takeRender).not.toRerender(); +}); + +// TODO: Determine how we handle partial data with streamed responses. While this +// works as expected and renders correctly, this also emits missing field +// warnings in the console when writing the result to the cache since array items +// with partial cache data are still included for items that haven't streamed in +// yet. 
+test('does not suspend deferred queries with partial data in the cache and using a "cache-first" fetch policy with `returnPartialData`', async () => { + using _TODO_REMOVE_ME_AFTER_DECIDING_COMMENT = spyOnConsole("error"); + const { stream, subject } = asyncIterableSubject(); + interface QueryData { + friendList: Array<{ __typename: "Friend"; id: string; name: string }>; + } + + const query: TypedDocumentNode = gql` + query { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + const client = new ApolloClient({ + link: createLink({ friendList: () => stream }), + cache: new InMemoryCache(), + incrementalHandler: new Defer20220824Handler(), + }); + + // We are intentionally writing partial data to the cache. Supress console + // warnings to avoid unnecessary noise in the test. + { + // using _consoleSpy = spyOnConsole("error"); + client.writeQuery({ + query, + data: { + friendList: [ + // @ts-expect-error + { __typename: "Friend", id: "1" }, + // @ts-expect-error + { __typename: "Friend", id: "2" }, + // @ts-expect-error + { __typename: "Friend", id: "3" }, + ], + }, + }); + } + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + () => + useBackgroundQuery(query, { + fetchPolicy: "cache-first", + returnPartialData: true, + }), + { wrapper: createClientWrapper(client) } + ); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual([ + "useBackgroundQuery", + "useReadQuery", + ]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1" }, + { __typename: "Friend", id: "2" }, + { __typename: "Friend", id: "3" }, + ], + }, + dataState: "partial", + error: undefined, + networkStatus: NetworkStatus.loading, + }); + } + + subject.next(friends[0]); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useReadQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + // @ts-expect-error + { __typename: "Friend", id: "2" }, + // @ts-expect-error + { __typename: "Friend", id: "3" }, + ], + }, + dataState: "streaming", + error: undefined, + networkStatus: NetworkStatus.streaming, + }); + } + + subject.next(friends[1]); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useReadQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + // @ts-expect-error + { __typename: "Friend", id: "3" }, + ], + }, + dataState: "streaming", + error: undefined, + networkStatus: NetworkStatus.streaming, + }); + } + + subject.next(friends[2]); + subject.complete(); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useReadQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + dataState: "streaming", + error: undefined, + networkStatus: NetworkStatus.streaming, + }); + } + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useReadQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", 
name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + dataState: "complete", + error: undefined, + networkStatus: NetworkStatus.ready, + }); + } + + await expect(takeRender).not.toRerender(); +}); diff --git a/src/react/hooks/__tests__/useBackgroundQuery/streamGraphQL17Alpha9.test.tsx b/src/react/hooks/__tests__/useBackgroundQuery/streamGraphQL17Alpha9.test.tsx new file mode 100644 index 00000000000..fd15889a9ad --- /dev/null +++ b/src/react/hooks/__tests__/useBackgroundQuery/streamGraphQL17Alpha9.test.tsx @@ -0,0 +1,414 @@ +import type { RenderOptions } from "@testing-library/react"; +import { + createRenderStream, + disableActEnvironment, + useTrackRenders, +} from "@testing-library/react-render-stream"; +import React, { Suspense } from "react"; +import { ErrorBoundary } from "react-error-boundary"; +import { from } from "rxjs"; + +import type { + DataState, + ErrorLike, + OperationVariables, + TypedDocumentNode, +} from "@apollo/client"; +import { ApolloClient, ApolloLink, gql, NetworkStatus } from "@apollo/client"; +import { InMemoryCache } from "@apollo/client/cache"; +import { GraphQL17Alpha9Handler } from "@apollo/client/incremental"; +import type { QueryRef } from "@apollo/client/react"; +import { useBackgroundQuery, useReadQuery } from "@apollo/client/react"; +import { + asyncIterableSubject, + createClientWrapper, + executeSchemaGraphQL17Alpha9, + friendListSchemaGraphQL17Alpha9, + markAsStreaming, + spyOnConsole, +} from "@apollo/client/testing/internal"; + +async function renderSuspenseHook< + TData, + TVariables extends OperationVariables, + TQueryRef extends QueryRef, + TStates extends DataState["dataState"] = TQueryRef extends ( + QueryRef + ) ? + States + : never, + Props = never, +>( + renderHook: ( + props: Props extends never ? 
undefined : Props + ) => [TQueryRef, useBackgroundQuery.Result], + options: Pick & { initialProps?: Props } +) { + function UseReadQuery({ queryRef }: { queryRef: QueryRef }) { + useTrackRenders({ name: "useReadQuery" }); + replaceSnapshot(useReadQuery(queryRef) as any); + + return null; + } + + function SuspenseFallback() { + useTrackRenders({ name: "SuspenseFallback" }); + + return null; + } + + function ErrorFallback() { + useTrackRenders({ name: "ErrorBoundary" }); + + return null; + } + + function App({ props }: { props: Props | undefined }) { + useTrackRenders({ name: "useBackgroundQuery" }); + const [queryRef] = renderHook(props as any); + + return ( + }> + replaceSnapshot({ error })} + > + + + + ); + } + + const { render, takeRender, replaceSnapshot } = createRenderStream< + useReadQuery.Result | { error: ErrorLike } + >(); + + const utils = await render(, options); + + function rerender(props: Props) { + return utils.rerender(); + } + + return { takeRender, rerender }; +} + +function createLink(rootValue?: unknown) { + return new ApolloLink((operation) => { + return from( + executeSchemaGraphQL17Alpha9( + friendListSchemaGraphQL17Alpha9, + operation.query, + rootValue + ) + ); + }); +} + +const friends = [ + { name: "Luke", id: 1 }, + { name: "Han", id: 2 }, + { name: "Leia", id: 3 }, +]; + +test('does not suspend deferred queries with data in the cache and using a "cache-and-network" fetch policy', async () => { + const { stream, subject } = asyncIterableSubject(); + interface Data { + friendList: Array<{ __typename: "Friend"; id: string; name: string }>; + } + + const query: TypedDocumentNode = gql` + query { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + const cache = new InMemoryCache(); + cache.writeQuery({ + query, + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Cached Luke" }, + { __typename: "Friend", id: "2", name: "Cached Han" }, + { __typename: "Friend", id: "3", name: "Cached Leia" }, + ], + }, + }); + const client = new ApolloClient({ + cache, + link: createLink({ friendList: () => stream }), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + () => useBackgroundQuery(query, { fetchPolicy: "cache-and-network" }), + { wrapper: createClientWrapper(client) } + ); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual([ + "useBackgroundQuery", + "useReadQuery", + ]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Cached Luke" }, + { __typename: "Friend", id: "2", name: "Cached Han" }, + { __typename: "Friend", id: "3", name: "Cached Leia" }, + ], + }, + dataState: "complete", + error: undefined, + networkStatus: NetworkStatus.loading, + }); + } + + subject.next(friends[0]); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useReadQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Cached Han" }, + { __typename: "Friend", id: "3", name: "Cached Leia" }, + ], + }, + dataState: "streaming", + error: undefined, + networkStatus: NetworkStatus.streaming, + }); + } + + subject.next(friends[1]); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useReadQuery"]); + 
expect(snapshot).toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Cached Leia" }, + ], + }, + dataState: "streaming", + error: undefined, + networkStatus: NetworkStatus.streaming, + }); + } + + subject.next(friends[2]); + subject.complete(); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useReadQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }), + dataState: "streaming", + error: undefined, + networkStatus: NetworkStatus.streaming, + }); + } + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useReadQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + dataState: "complete", + error: undefined, + networkStatus: NetworkStatus.ready, + }); + } + + await expect(takeRender).not.toRerender(); +}); + +// TODO: Determine how we handle partial data with streamed responses. While this +// works as expected and renders correctly, this also emits missing field +// warnings in the console when writing the result to the cache since array items +// with partial cache data are still included for items that haven't streamed in +// yet. +test('does not suspend deferred queries with partial data in the cache and using a "cache-first" fetch policy with `returnPartialData`', async () => { + using _TODO_REMOVE_ME_AFTER_DECIDING_COMMENT = spyOnConsole("error"); + const { stream, subject } = asyncIterableSubject(); + interface QueryData { + friendList: Array<{ __typename: "Friend"; id: string; name: string }>; + } + + const query: TypedDocumentNode = gql` + query { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + const client = new ApolloClient({ + link: createLink({ friendList: () => stream }), + cache: new InMemoryCache(), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + // We are intentionally writing partial data to the cache. Supress console + // warnings to avoid unnecessary noise in the test. 
+ { + // using _consoleSpy = spyOnConsole("error"); + client.writeQuery({ + query, + data: { + friendList: [ + // @ts-expect-error + { __typename: "Friend", id: "1" }, + // @ts-expect-error + { __typename: "Friend", id: "2" }, + // @ts-expect-error + { __typename: "Friend", id: "3" }, + ], + }, + }); + } + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderSuspenseHook( + () => + useBackgroundQuery(query, { + fetchPolicy: "cache-first", + returnPartialData: true, + }), + { wrapper: createClientWrapper(client) } + ); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual([ + "useBackgroundQuery", + "useReadQuery", + ]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1" }, + { __typename: "Friend", id: "2" }, + { __typename: "Friend", id: "3" }, + ], + }, + dataState: "partial", + error: undefined, + networkStatus: NetworkStatus.loading, + }); + } + + subject.next(friends[0]); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useReadQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + // @ts-expect-error + { __typename: "Friend", id: "2" }, + // @ts-expect-error + { __typename: "Friend", id: "3" }, + ], + }, + dataState: "streaming", + error: undefined, + networkStatus: NetworkStatus.streaming, + }); + } + + subject.next(friends[1]); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useReadQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + // @ts-expect-error + { __typename: "Friend", id: "3" }, + ], + }, + dataState: "streaming", + error: undefined, + networkStatus: NetworkStatus.streaming, + }); + } + + subject.next(friends[2]); + subject.complete(); + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useReadQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + dataState: "streaming", + error: undefined, + networkStatus: NetworkStatus.streaming, + }); + } + + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useReadQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + dataState: "complete", + error: undefined, + networkStatus: NetworkStatus.ready, + }); + } + + await expect(takeRender).not.toRerender(); +}); From 1a2c04d5a303581fd31538831b89d18653ee692a Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Fri, 12 Sep 2025 17:12:18 -0600 Subject: [PATCH 196/254] Add array merge strategy to deep merger --- src/utilities/internal/DeepMerger.ts | 25 ++++++++++++++++++- .../internal/__tests__/DeepMerger.test.ts | 20 +++++++++++++++ 2 files changed, 44 insertions(+), 1 deletion(-) diff --git a/src/utilities/internal/DeepMerger.ts b/src/utilities/internal/DeepMerger.ts index ebbcce20660..4ad61528f9e 100644 --- 
a/src/utilities/internal/DeepMerger.ts +++ b/src/utilities/internal/DeepMerger.ts @@ -10,6 +10,14 @@ type ReconcilerFunction = ( ...context: TContextArgs ) => any; +type ArrayMergeStrategy = + // Truncate the target array to the source length, then deep merge the array + // items at the same index + | "truncate" + // Combine arrays and deep merge array items for items at the same index. + // This is the default + | "combine"; + const defaultReconciler: ReconcilerFunction = function ( target, source, @@ -18,13 +26,28 @@ const defaultReconciler: ReconcilerFunction = function ( return this.merge(target[property], source[property]); }; +export declare namespace DeepMerger { + export interface Options { + arrayMerge?: ArrayMergeStrategy; + } +} + /** @internal */ export class DeepMerger { constructor( - private reconciler: ReconcilerFunction = defaultReconciler as any as ReconcilerFunction + private reconciler: ReconcilerFunction = defaultReconciler as any as ReconcilerFunction, + private options: DeepMerger.Options = {} ) {} public merge(target: any, source: any, ...context: TContextArgs): any { + if ( + Array.isArray(target) && + Array.isArray(source) && + this.options.arrayMerge === "truncate" + ) { + target = this.shallowCopyForMerge(target).slice(0, source.length); + } + if (isNonNullObject(source) && isNonNullObject(target)) { Object.keys(source).forEach((sourceKey) => { if (hasOwnProperty.call(target, sourceKey)) { diff --git a/src/utilities/internal/__tests__/DeepMerger.test.ts b/src/utilities/internal/__tests__/DeepMerger.test.ts index 88d6b05a2ba..61f5e38aed9 100644 --- a/src/utilities/internal/__tests__/DeepMerger.test.ts +++ b/src/utilities/internal/__tests__/DeepMerger.test.ts @@ -94,3 +94,23 @@ test("provides optional context to reconciler function", function () { expect(typicalContextValues[0]).toBe(contextObject); expect(typicalContextValues[1]).toBe(contextObject); }); + +test("deep merges each array item keeping length by default", () => { + const target = [{ a: 1, b: { c: 2 } }, { e: 5 }]; + const source = [{ a: 2, b: { c: 2, d: 3 } }]; + + const result = new DeepMerger().merge(target, source); + + expect(result).toEqual([{ a: 2, b: { c: 2, d: 3 } }, { e: 5 }]); +}); + +test("deep merges each array item and truncates source to target length", () => { + const target = [{ a: 1, b: { c: 2 } }, { e: 5 }]; + const source = [{ a: 2, b: { c: 2, d: 3 } }]; + + const result = new DeepMerger(undefined, { + arrayMerge: "truncate", + }).merge(target, source); + + expect(result).toEqual([{ a: 2, b: { c: 2, d: 3 } }]); +}); From 92078c60e739630044cb9d25b865c5467ba950f4 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Fri, 12 Sep 2025 17:14:18 -0600 Subject: [PATCH 197/254] Default DeepMerger generic argument --- src/utilities/internal/DeepMerger.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/utilities/internal/DeepMerger.ts b/src/utilities/internal/DeepMerger.ts index 4ad61528f9e..958ba5d3977 100644 --- a/src/utilities/internal/DeepMerger.ts +++ b/src/utilities/internal/DeepMerger.ts @@ -33,7 +33,7 @@ export declare namespace DeepMerger { } /** @internal */ -export class DeepMerger { +export class DeepMerger { constructor( private reconciler: ReconcilerFunction = defaultReconciler as any as ReconcilerFunction, private options: DeepMerger.Options = {} From 308702aef21a6306bf546b5b6a3405fe2a56845a Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Fri, 12 Sep 2025 17:22:47 -0600 Subject: [PATCH 198/254] WIP truncate merge arrays --- 
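An illustrative sketch (not part of the patch series) of how the new arrayMerge option is expected to behave, based on the DeepMerger changes and tests in the commits above. The import path is an assumption here, since DeepMerger is an internal utility:

import { DeepMerger } from "@apollo/client/utilities/internal"; // assumed path

// Default ("combine"): items at the same index are deep merged and the longer
// array's extra items are kept.
new DeepMerger().merge([{ a: 1 }, { e: 5 }], [{ b: 2 }]);
// => [{ a: 1, b: 2 }, { e: 5 }]

// "truncate": the target array is first cut down to the source length, then
// items at the same index are deep merged.
new DeepMerger(undefined, { arrayMerge: "truncate" }).merge(
  [{ a: 1 }, { e: 5 }],
  [{ b: 2 }]
);
// => [{ a: 1, b: 2 }]

Later commits in this series make the incremental handlers default to "truncate" and switch to "combine" only when an incremental payload is re-nested under a numeric (array index) path key.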
.../__tests__/graphql17Alpha9/stream.test.ts | 15 +++---------- src/incremental/handlers/graphql17Alpha9.ts | 22 ++++++++++++------- 2 files changed, 17 insertions(+), 20 deletions(-) diff --git a/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts b/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts index 7b00b258c9f..bded4641629 100644 --- a/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts +++ b/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts @@ -2484,7 +2484,6 @@ test("properly merges streamed data into cache data", async () => { friendList: [ { name: "Luke", id: "1" }, { name: "Han", id: "2" }, - { name: "Leia Cached", id: "3" }, ], }, }); @@ -2502,7 +2501,6 @@ test("properly merges streamed data into cache data", async () => { friendList: [ { name: "Luke", id: "1" }, { name: "Han", id: "2" }, - { name: "Leia Cached", id: "3" }, ], }, chunk @@ -2554,7 +2552,6 @@ test("properly merges streamed data into partial cache data", async () => { friendList: [ { name: "Luke", id: "1" }, { name: "Han", id: "2" }, - { id: "3" }, ], }, }); @@ -2572,7 +2569,6 @@ test("properly merges streamed data into partial cache data", async () => { friendList: [ { name: "Luke", id: "1" }, { name: "Han", id: "2" }, - { id: "3" }, ], }, chunk @@ -2697,8 +2693,6 @@ test("properly merges streamed data into list with more items", async () => { friendList: [ { name: "Luke", id: "1" }, { name: "Han", id: "2" }, - { name: "Leia Cached", id: "3" }, - { name: "Chewbacca Cached", id: "4" }, ], }, }); @@ -2716,8 +2710,6 @@ test("properly merges streamed data into list with more items", async () => { friendList: [ { name: "Luke", id: "1" }, { name: "Han", id: "2" }, - { name: "Leia Cached", id: "3" }, - { name: "Chewbacca Cached", id: "4" }, ], }, chunk @@ -2728,7 +2720,6 @@ test("properly merges streamed data into list with more items", async () => { { name: "Luke", id: "1" }, { name: "Han", id: "2" }, { name: "Leia", id: "3" }, - { name: "Chewbacca Cached", id: "4" }, ], }, }); @@ -2814,7 +2805,7 @@ test("properly merges cache data when list is included in deferred chunk", async data: { nestedObject: { scalarField: "slow", - nestedFriendList: [{ name: "Luke Cached" }, { name: "Han Cached" }], + nestedFriendList: [], }, }, }); @@ -2830,7 +2821,7 @@ test("properly merges cache data when list is included in deferred chunk", async data: { nestedObject: { scalarField: "slow", - nestedFriendList: [{ name: "Luke" }, { name: "Han Cached" }], + nestedFriendList: [{ name: "Luke" }], }, }, }); @@ -2847,7 +2838,7 @@ test("properly merges cache data when list is included in deferred chunk", async { nestedObject: { scalarField: "slow", - nestedFriendList: [{ name: "Luke" }, { name: "Han Cached" }], + nestedFriendList: [{ name: "Luke" }], }, }, chunk diff --git a/src/incremental/handlers/graphql17Alpha9.ts b/src/incremental/handlers/graphql17Alpha9.ts index 2355ba10b05..4cfc2d447c9 100644 --- a/src/incremental/handlers/graphql17Alpha9.ts +++ b/src/incremental/handlers/graphql17Alpha9.ts @@ -119,7 +119,7 @@ class IncrementalRequest } } - this.merge(chunk); + this.merge(chunk, new DeepMerger(undefined, { arrayMerge: "truncate" })); if (hasIncrementalChunks(chunk)) { for (const incremental of chunk.incremental) { @@ -181,11 +181,14 @@ class IncrementalRequest data = parent; } - this.merge({ - data, - extensions: incremental.extensions, - errors: incremental.errors, - }); + this.merge( + { + data, + extensions: incremental.extensions, + errors: incremental.errors, + }, + new 
DeepMerger(undefined, { arrayMerge: "truncate" }) + ); } } @@ -212,9 +215,12 @@ class IncrementalRequest return result; } - private merge(normalized: FormattedExecutionResult) { + private merge( + normalized: FormattedExecutionResult, + merger: DeepMerger + ) { if (normalized.data !== undefined) { - this.data = new DeepMerger().merge(this.data, normalized.data); + this.data = merger.merge(this.data, normalized.data); } if (normalized.errors) { From 67161f6509b6cf39a1fd1bc38da48b9aa7386a3b Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Mon, 15 Sep 2025 09:21:09 -0600 Subject: [PATCH 199/254] Check length before truncating --- src/utilities/internal/DeepMerger.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/utilities/internal/DeepMerger.ts b/src/utilities/internal/DeepMerger.ts index 958ba5d3977..01ac04027ee 100644 --- a/src/utilities/internal/DeepMerger.ts +++ b/src/utilities/internal/DeepMerger.ts @@ -43,7 +43,8 @@ export class DeepMerger { if ( Array.isArray(target) && Array.isArray(source) && - this.options.arrayMerge === "truncate" + this.options.arrayMerge === "truncate" && + target.length > source.length ) { target = this.shallowCopyForMerge(target).slice(0, source.length); } From 16bf7bc9c8ab4b2f5eab187ce7e6f2dd67b6f7a0 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Mon, 15 Sep 2025 10:03:32 -0600 Subject: [PATCH 200/254] Add another test for truncate merge --- src/utilities/internal/__tests__/DeepMerger.test.ts | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/src/utilities/internal/__tests__/DeepMerger.test.ts b/src/utilities/internal/__tests__/DeepMerger.test.ts index 61f5e38aed9..01b0361ac83 100644 --- a/src/utilities/internal/__tests__/DeepMerger.test.ts +++ b/src/utilities/internal/__tests__/DeepMerger.test.ts @@ -104,7 +104,7 @@ test("deep merges each array item keeping length by default", () => { expect(result).toEqual([{ a: 2, b: { c: 2, d: 3 } }, { e: 5 }]); }); -test("deep merges each array item and truncates source to target length", () => { +test("deep merges each array item and truncates source to target length when using truncate arrayMerge", () => { const target = [{ a: 1, b: { c: 2 } }, { e: 5 }]; const source = [{ a: 2, b: { c: 2, d: 3 } }]; @@ -114,3 +114,14 @@ test("deep merges each array item and truncates source to target length", () => expect(result).toEqual([{ a: 2, b: { c: 2, d: 3 } }]); }); + +test("maintains source length when using truncate arrayMerge when source is longer than target length", () => { + const target = [{ a: 1, b: { c: 2 } }]; + const source = [{ a: 2 }, { e: 2 }]; + + const result = new DeepMerger(undefined, { + arrayMerge: "truncate", + }).merge(target, source); + + expect(result).toEqual([{ a: 2, b: { c: 2 } }, { e: 2 }]); +}); From 8442c849aec20220db7fbdbec8010e64a4fe2ae1 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Mon, 15 Sep 2025 10:05:02 -0600 Subject: [PATCH 201/254] Add past copies instead of copying twice --- src/utilities/internal/DeepMerger.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/utilities/internal/DeepMerger.ts b/src/utilities/internal/DeepMerger.ts index 01ac04027ee..8b986743c42 100644 --- a/src/utilities/internal/DeepMerger.ts +++ b/src/utilities/internal/DeepMerger.ts @@ -46,7 +46,8 @@ export class DeepMerger { this.options.arrayMerge === "truncate" && target.length > source.length ) { - target = this.shallowCopyForMerge(target).slice(0, source.length); + target = target.slice(0, source.length); + 
this.pastCopies.add(target); } if (isNonNullObject(source) && isNonNullObject(target)) { From 2f00a397f3b021e2aae1dbb1147f75b0e221d47d Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Mon, 15 Sep 2025 10:06:33 -0600 Subject: [PATCH 202/254] Inline new merger --- src/incremental/handlers/graphql17Alpha9.ts | 25 +++++++++------------ 1 file changed, 11 insertions(+), 14 deletions(-) diff --git a/src/incremental/handlers/graphql17Alpha9.ts b/src/incremental/handlers/graphql17Alpha9.ts index 4cfc2d447c9..67143f3cb64 100644 --- a/src/incremental/handlers/graphql17Alpha9.ts +++ b/src/incremental/handlers/graphql17Alpha9.ts @@ -119,7 +119,7 @@ class IncrementalRequest } } - this.merge(chunk, new DeepMerger(undefined, { arrayMerge: "truncate" })); + this.merge(chunk); if (hasIncrementalChunks(chunk)) { for (const incremental of chunk.incremental) { @@ -181,14 +181,11 @@ class IncrementalRequest data = parent; } - this.merge( - { - data, - extensions: incremental.extensions, - errors: incremental.errors, - }, - new DeepMerger(undefined, { arrayMerge: "truncate" }) - ); + this.merge({ + data, + extensions: incremental.extensions, + errors: incremental.errors, + }); } } @@ -215,12 +212,12 @@ class IncrementalRequest return result; } - private merge( - normalized: FormattedExecutionResult, - merger: DeepMerger - ) { + private merge(normalized: FormattedExecutionResult) { if (normalized.data !== undefined) { - this.data = merger.merge(this.data, normalized.data); + this.data = new DeepMerger(undefined, { arrayMerge: "truncate" }).merge( + this.data, + normalized.data + ); } if (normalized.errors) { From 05f9085f5819277f02ca6534bbda26fa42697f73 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Mon, 15 Sep 2025 10:20:59 -0600 Subject: [PATCH 203/254] Move type to namespace --- src/utilities/internal/DeepMerger.ts | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/src/utilities/internal/DeepMerger.ts b/src/utilities/internal/DeepMerger.ts index 8b986743c42..5f3425e8711 100644 --- a/src/utilities/internal/DeepMerger.ts +++ b/src/utilities/internal/DeepMerger.ts @@ -10,14 +10,6 @@ type ReconcilerFunction = ( ...context: TContextArgs ) => any; -type ArrayMergeStrategy = - // Truncate the target array to the source length, then deep merge the array - // items at the same index - | "truncate" - // Combine arrays and deep merge array items for items at the same index. - // This is the default - | "combine"; - const defaultReconciler: ReconcilerFunction = function ( target, source, @@ -28,8 +20,16 @@ const defaultReconciler: ReconcilerFunction = function ( export declare namespace DeepMerger { export interface Options { - arrayMerge?: ArrayMergeStrategy; + arrayMerge?: DeepMerger.ArrayMergeStrategy; } + + export type ArrayMergeStrategy = + // Truncate the target array to the source length, then deep merge the array + // items at the same index + | "truncate" + // Combine arrays and deep merge array items for items at the same index. 
+ // This is the default + | "combine"; } /** @internal */ From 07567efa4ea32228a0477c0b766c4752f4e090c4 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Mon, 15 Sep 2025 10:22:48 -0600 Subject: [PATCH 204/254] Use dynamic array merge strategies --- src/incremental/handlers/graphql17Alpha9.ts | 24 ++++++++++++++------- 1 file changed, 16 insertions(+), 8 deletions(-) diff --git a/src/incremental/handlers/graphql17Alpha9.ts b/src/incremental/handlers/graphql17Alpha9.ts index 67143f3cb64..d36a7a03f28 100644 --- a/src/incremental/handlers/graphql17Alpha9.ts +++ b/src/incremental/handlers/graphql17Alpha9.ts @@ -119,7 +119,7 @@ class IncrementalRequest } } - this.merge(chunk); + this.merge(chunk, "truncate"); if (hasIncrementalChunks(chunk)) { for (const incremental of chunk.incremental) { @@ -133,6 +133,7 @@ class IncrementalRequest const path = pending.path.concat(incremental.subPath ?? []); let data: any; + let arrayMerge: DeepMerger.ArrayMergeStrategy = "combine"; if ("items" in incremental) { const items = incremental.items as any[]; const parent: any[] = []; @@ -168,6 +169,7 @@ class IncrementalRequest if (Array.isArray(dataAtPath)) { this.streamPositions[pendingItem.id] = dataAtPath.length; + arrayMerge = "truncate"; } } } @@ -181,11 +183,14 @@ class IncrementalRequest data = parent; } - this.merge({ - data, - extensions: incremental.extensions, - errors: incremental.errors, - }); + this.merge( + { + data, + extensions: incremental.extensions, + errors: incremental.errors, + }, + arrayMerge + ); } } @@ -212,9 +217,12 @@ class IncrementalRequest return result; } - private merge(normalized: FormattedExecutionResult) { + private merge( + normalized: FormattedExecutionResult, + arrayMerge: "combine" | "truncate" + ) { if (normalized.data !== undefined) { - this.data = new DeepMerger(undefined, { arrayMerge: "truncate" }).merge( + this.data = new DeepMerger(undefined, { arrayMerge }).merge( this.data, normalized.data ); From 21a1928473428c2640d301b1397c1c876bb3ccd5 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Mon, 15 Sep 2025 10:34:04 -0600 Subject: [PATCH 205/254] Use property type for array merge --- src/incremental/handlers/graphql17Alpha9.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/incremental/handlers/graphql17Alpha9.ts b/src/incremental/handlers/graphql17Alpha9.ts index d36a7a03f28..a28498306c1 100644 --- a/src/incremental/handlers/graphql17Alpha9.ts +++ b/src/incremental/handlers/graphql17Alpha9.ts @@ -219,7 +219,7 @@ class IncrementalRequest private merge( normalized: FormattedExecutionResult, - arrayMerge: "combine" | "truncate" + arrayMerge: DeepMerger.ArrayMergeStrategy ) { if (normalized.data !== undefined) { this.data = new DeepMerger(undefined, { arrayMerge }).merge( From 296dc9a62c4c7a282829235357e13d13af6d1a07 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Mon, 15 Sep 2025 10:35:28 -0600 Subject: [PATCH 206/254] Truncate arrays in defer20220824 handler --- .../__tests__/defer20220824/stream.test.ts | 15 +++------------ src/incremental/handlers/defer20220824.ts | 5 ++++- 2 files changed, 7 insertions(+), 13 deletions(-) diff --git a/src/incremental/handlers/__tests__/defer20220824/stream.test.ts b/src/incremental/handlers/__tests__/defer20220824/stream.test.ts index 13c09126dc9..3f2cd4e4fed 100644 --- a/src/incremental/handlers/__tests__/defer20220824/stream.test.ts +++ b/src/incremental/handlers/__tests__/defer20220824/stream.test.ts @@ -1586,7 +1586,6 @@ test("properly merges streamed data into cache data", async () => { friendList: [ { 
name: "Luke", id: "1" }, { name: "Han", id: "2" }, - { name: "Leia Cached", id: "3" }, ], }, }); @@ -1604,7 +1603,6 @@ test("properly merges streamed data into cache data", async () => { friendList: [ { name: "Luke", id: "1" }, { name: "Han", id: "2" }, - { name: "Leia Cached", id: "3" }, ], }, chunk @@ -1654,7 +1652,6 @@ test("properly merges streamed data into partial cache data", async () => { friendList: [ { name: "Luke", id: "1" }, { name: "Han", id: "2" }, - { id: "3" }, ], }, }); @@ -1672,7 +1669,6 @@ test("properly merges streamed data into partial cache data", async () => { friendList: [ { name: "Luke", id: "1" }, { name: "Han", id: "2" }, - { id: "3" }, ], }, chunk @@ -1792,8 +1788,6 @@ test("properly merges streamed data into list with more items", async () => { friendList: [ { name: "Luke", id: "1" }, { name: "Han", id: "2" }, - { name: "Leia Cached", id: "3" }, - { name: "Chewbacca Cached", id: "4" }, ], }, }); @@ -1811,8 +1805,6 @@ test("properly merges streamed data into list with more items", async () => { friendList: [ { name: "Luke", id: "1" }, { name: "Han", id: "2" }, - { name: "Leia Cached", id: "3" }, - { name: "Chewbacca Cached", id: "4" }, ], }, chunk @@ -1823,7 +1815,6 @@ test("properly merges streamed data into list with more items", async () => { { name: "Luke", id: "1" }, { name: "Han", id: "2" }, { name: "Leia", id: "3" }, - { name: "Chewbacca Cached", id: "4" }, ], }, }); @@ -1909,7 +1900,7 @@ test("properly merges cache data when list is included in deferred chunk", async data: { nestedObject: { scalarField: "slow", - nestedFriendList: [{ name: "Luke Cached" }, { name: "Han Cached" }], + nestedFriendList: [], }, }, }); @@ -1925,7 +1916,7 @@ test("properly merges cache data when list is included in deferred chunk", async data: { nestedObject: { scalarField: "slow", - nestedFriendList: [{ name: "Luke" }, { name: "Han Cached" }], + nestedFriendList: [{ name: "Luke" }], }, }, }); @@ -1942,7 +1933,7 @@ test("properly merges cache data when list is included in deferred chunk", async { nestedObject: { scalarField: "slow", - nestedFriendList: [{ name: "Luke" }, { name: "Han Cached" }], + nestedFriendList: [{ name: "Luke" }], }, }, chunk diff --git a/src/incremental/handlers/defer20220824.ts b/src/incremental/handlers/defer20220824.ts index 27ce3d3c96d..77d6d4fc7a9 100644 --- a/src/incremental/handlers/defer20220824.ts +++ b/src/incremental/handlers/defer20220824.ts @@ -77,7 +77,10 @@ class DeferRequest> private merge(normalized: FormattedExecutionResult) { if (normalized.data !== undefined) { - this.data = new DeepMerger().merge(this.data, normalized.data); + this.data = new DeepMerger(undefined, { arrayMerge: "truncate" }).merge( + this.data, + normalized.data + ); } if (normalized.errors) { this.errors.push(...normalized.errors); From 156fa22eaaaa4ee661a98a90e6dc37e73452dd73 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Mon, 15 Sep 2025 10:42:50 -0600 Subject: [PATCH 207/254] Update useBackgroundQuery tests to reflect updated nature on lists --- .../streamDefer20220824.test.tsx | 45 ++++++++++++------- .../streamGraphQL17Alpha9.test.tsx | 45 ++++++++++++------- 2 files changed, 56 insertions(+), 34 deletions(-) diff --git a/src/react/hooks/__tests__/useBackgroundQuery/streamDefer20220824.test.tsx b/src/react/hooks/__tests__/useBackgroundQuery/streamDefer20220824.test.tsx index b34b85aa6c6..045d8529679 100644 --- a/src/react/hooks/__tests__/useBackgroundQuery/streamDefer20220824.test.tsx +++ 
b/src/react/hooks/__tests__/useBackgroundQuery/streamDefer20220824.test.tsx @@ -125,7 +125,19 @@ test('does not suspend deferred queries with data in the cache and using a "cach } `; - const cache = new InMemoryCache(); + const cache = new InMemoryCache({ + typePolicies: { + Query: { + fields: { + friendList: { + merge: (_, incoming) => { + return incoming; + }, + }, + }, + }, + }, + }); cache.writeQuery({ query, data: { @@ -177,11 +189,7 @@ test('does not suspend deferred queries with data in the cache and using a "cach expect(renderedComponents).toStrictEqual(["useReadQuery"]); expect(snapshot).toStrictEqualTyped({ data: { - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - { __typename: "Friend", id: "2", name: "Cached Han" }, - { __typename: "Friend", id: "3", name: "Cached Leia" }, - ], + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], }, dataState: "streaming", error: undefined, @@ -200,7 +208,6 @@ test('does not suspend deferred queries with data in the cache and using a "cach friendList: [ { __typename: "Friend", id: "1", name: "Luke" }, { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3", name: "Cached Leia" }, ], }, dataState: "streaming", @@ -274,7 +281,19 @@ test('does not suspend deferred queries with partial data in the cache and using const client = new ApolloClient({ link: createLink({ friendList: () => stream }), - cache: new InMemoryCache(), + cache: new InMemoryCache({ + typePolicies: { + Query: { + fields: { + friendList: { + merge: (_, incoming) => { + return incoming; + }, + }, + }, + }, + }, + }), incrementalHandler: new Defer20220824Handler(), }); @@ -336,13 +355,7 @@ test('does not suspend deferred queries with partial data in the cache and using expect(renderedComponents).toStrictEqual(["useReadQuery"]); expect(snapshot).toStrictEqualTyped({ data: { - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - // @ts-expect-error - { __typename: "Friend", id: "2" }, - // @ts-expect-error - { __typename: "Friend", id: "3" }, - ], + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], }, dataState: "streaming", error: undefined, @@ -361,8 +374,6 @@ test('does not suspend deferred queries with partial data in the cache and using friendList: [ { __typename: "Friend", id: "1", name: "Luke" }, { __typename: "Friend", id: "2", name: "Han" }, - // @ts-expect-error - { __typename: "Friend", id: "3" }, ], }, dataState: "streaming", diff --git a/src/react/hooks/__tests__/useBackgroundQuery/streamGraphQL17Alpha9.test.tsx b/src/react/hooks/__tests__/useBackgroundQuery/streamGraphQL17Alpha9.test.tsx index fd15889a9ad..f3e01cd0458 100644 --- a/src/react/hooks/__tests__/useBackgroundQuery/streamGraphQL17Alpha9.test.tsx +++ b/src/react/hooks/__tests__/useBackgroundQuery/streamGraphQL17Alpha9.test.tsx @@ -125,7 +125,19 @@ test('does not suspend deferred queries with data in the cache and using a "cach } `; - const cache = new InMemoryCache(); + const cache = new InMemoryCache({ + typePolicies: { + Query: { + fields: { + friendList: { + merge: (_, incoming) => { + return incoming; + }, + }, + }, + }, + }, + }); cache.writeQuery({ query, data: { @@ -177,11 +189,7 @@ test('does not suspend deferred queries with data in the cache and using a "cach expect(renderedComponents).toStrictEqual(["useReadQuery"]); expect(snapshot).toStrictEqualTyped({ data: { - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - { __typename: "Friend", id: "2", name: "Cached Han" }, - { __typename: "Friend", id: "3", 
name: "Cached Leia" }, - ], + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], }, dataState: "streaming", error: undefined, @@ -200,7 +208,6 @@ test('does not suspend deferred queries with data in the cache and using a "cach friendList: [ { __typename: "Friend", id: "1", name: "Luke" }, { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3", name: "Cached Leia" }, ], }, dataState: "streaming", @@ -274,7 +281,19 @@ test('does not suspend deferred queries with partial data in the cache and using const client = new ApolloClient({ link: createLink({ friendList: () => stream }), - cache: new InMemoryCache(), + cache: new InMemoryCache({ + typePolicies: { + Query: { + fields: { + friendList: { + merge: (_, incoming) => { + return incoming; + }, + }, + }, + }, + }, + }), incrementalHandler: new GraphQL17Alpha9Handler(), }); @@ -336,13 +355,7 @@ test('does not suspend deferred queries with partial data in the cache and using expect(renderedComponents).toStrictEqual(["useReadQuery"]); expect(snapshot).toStrictEqualTyped({ data: { - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - // @ts-expect-error - { __typename: "Friend", id: "2" }, - // @ts-expect-error - { __typename: "Friend", id: "3" }, - ], + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], }, dataState: "streaming", error: undefined, @@ -361,8 +374,6 @@ test('does not suspend deferred queries with partial data in the cache and using friendList: [ { __typename: "Friend", id: "1", name: "Luke" }, { __typename: "Friend", id: "2", name: "Han" }, - // @ts-expect-error - { __typename: "Friend", id: "3" }, ], }, dataState: "streaming", From 58b5269d1f3e84869dd46411aef05568ba2f9180 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Mon, 15 Sep 2025 10:59:06 -0600 Subject: [PATCH 208/254] Combine array items if merging defer arrays --- src/incremental/handlers/defer20220824.ts | 24 ++++++++++++++++------- 1 file changed, 17 insertions(+), 7 deletions(-) diff --git a/src/incremental/handlers/defer20220824.ts b/src/incremental/handlers/defer20220824.ts index 77d6d4fc7a9..13a495f4ab6 100644 --- a/src/incremental/handlers/defer20220824.ts +++ b/src/incremental/handlers/defer20220824.ts @@ -75,9 +75,12 @@ class DeferRequest> private extensions: Record = {}; private data: any = {}; - private merge(normalized: FormattedExecutionResult) { + private merge( + normalized: FormattedExecutionResult, + arrayMerge: DeepMerger.ArrayMergeStrategy = "truncate" + ) { if (normalized.data !== undefined) { - this.data = new DeepMerger(undefined, { arrayMerge: "truncate" }).merge( + this.data = new DeepMerger(undefined, { arrayMerge }).merge( this.data, normalized.data ); @@ -101,6 +104,7 @@ class DeferRequest> if (hasIncrementalChunks(chunk)) { for (const incremental of chunk.incremental) { const { path, errors, extensions } = incremental; + let arrayMerge: DeepMerger.ArrayMergeStrategy = "truncate"; let data = // The item merged from a `@stream` chunk is always the first item in // the `items` array @@ -115,15 +119,21 @@ class DeferRequest> const key = path[i]; const isNumericKey = !isNaN(+key); const parent: Record = isNumericKey ? [] : {}; + if (isNumericKey) { + arrayMerge = "combine"; + } parent[key] = data; data = parent as typeof data; } } - this.merge({ - errors, - extensions, - data: data ? (data as TData) : undefined, - }); + this.merge( + { + errors, + extensions, + data: data ? 
(data as TData) : undefined, + }, + arrayMerge + ); } } From ede14765678542c5f219eb9ae6dbdc18e9a6c5ad Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Mon, 15 Sep 2025 11:59:02 -0600 Subject: [PATCH 209/254] Make graphql17Alpha9 more like Defer20220824 when determining arrayMerge behavior --- src/incremental/handlers/graphql17Alpha9.ts | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/incremental/handlers/graphql17Alpha9.ts b/src/incremental/handlers/graphql17Alpha9.ts index a28498306c1..51ea4acc714 100644 --- a/src/incremental/handlers/graphql17Alpha9.ts +++ b/src/incremental/handlers/graphql17Alpha9.ts @@ -133,7 +133,7 @@ class IncrementalRequest const path = pending.path.concat(incremental.subPath ?? []); let data: any; - let arrayMerge: DeepMerger.ArrayMergeStrategy = "combine"; + let arrayMerge: DeepMerger.ArrayMergeStrategy = "truncate"; if ("items" in incremental) { const items = incremental.items as any[]; const parent: any[] = []; @@ -169,7 +169,6 @@ class IncrementalRequest if (Array.isArray(dataAtPath)) { this.streamPositions[pendingItem.id] = dataAtPath.length; - arrayMerge = "truncate"; } } } @@ -180,6 +179,9 @@ class IncrementalRequest const parent: Record = typeof key === "number" ? [] : {}; parent[key] = data; + if (typeof key === "number") { + arrayMerge = "combine"; + } data = parent; } From 64d8536a2e4ae49ae9a6f8bf15ab181fad57960f Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Mon, 15 Sep 2025 14:22:04 -0600 Subject: [PATCH 210/254] Update useSuspenseQuery stream tests with updated behavior of list merging --- .../streamDefer20220824.test.tsx | 55 +++++++++---------- .../streamGraphQL17Alpha9.test.tsx | 55 +++++++++---------- 2 files changed, 52 insertions(+), 58 deletions(-) diff --git a/src/react/hooks/__tests__/useSuspenseQuery/streamDefer20220824.test.tsx b/src/react/hooks/__tests__/useSuspenseQuery/streamDefer20220824.test.tsx index 0d2d441d4c5..181d9b26047 100644 --- a/src/react/hooks/__tests__/useSuspenseQuery/streamDefer20220824.test.tsx +++ b/src/react/hooks/__tests__/useSuspenseQuery/streamDefer20220824.test.tsx @@ -368,11 +368,6 @@ test('does not suspend streamed queries with data in the cache and using a "cach await expect(takeRender).not.toRerender(); }); -// TODO: Determine how we handle partial data with streamed responses. While this -// works as expected and renders correctly, this also emits missing field -// warnings in the console when writing the result to the cache since array items -// with partial cache data are still included for items that haven't streamed in -// yet. test('does not suspend streamed queries with partial data in the cache and using a "cache-first" fetch policy with `returnPartialData`', async () => { using _TODO_REMOVE_ME_AFTER_DECIDING_COMMENT = spyOnConsole("error"); const { subject, stream } = asyncIterableSubject(); @@ -386,7 +381,17 @@ test('does not suspend streamed queries with partial data in the cache and using } `; - const cache = new InMemoryCache(); + const cache = new InMemoryCache({ + typePolicies: { + Query: { + fields: { + friendList: { + merge: (_, incoming) => incoming, + }, + }, + }, + }, + }); // We are intentionally writing partial data to the cache. Supress console // warnings to avoid unnecessary noise in the test. 
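// Annotation (not part of this diff): the hunks in this commit opt friendList
// into a replace-with-incoming field policy, so every streamed write replaces
// whatever was previously cached for the list. That lines up with the updated
// expectations below, where cached trailing items no longer appear in
// mid-stream snapshots. A standalone equivalent of the cache configuration
// added above (import path assumed):
// import { InMemoryCache } from "@apollo/client";
const cacheReplacingFriendList = new InMemoryCache({
  typePolicies: {
    Query: {
      fields: {
        friendList: {
          // Ignore the existing cached list and keep only the incoming one.
          merge: (_, incoming) => incoming,
        },
      },
    },
  },
});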
@@ -446,11 +451,7 @@ test('does not suspend streamed queries with partial data in the cache and using expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); expect(snapshot).toStrictEqualTyped({ data: markAsStreaming({ - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - { __typename: "Friend", id: "2" }, - { __typename: "Friend", id: "3" }, - ], + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], }), dataState: "streaming", networkStatus: NetworkStatus.streaming, @@ -469,7 +470,6 @@ test('does not suspend streamed queries with partial data in the cache and using friendList: [ { __typename: "Friend", id: "1", name: "Luke" }, { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3" }, ], }), dataState: "streaming", @@ -514,7 +514,17 @@ test('does not suspend streamed queries with data in the cache and using a "cach `; const client = new ApolloClient({ - cache: new InMemoryCache(), + cache: new InMemoryCache({ + typePolicies: { + Query: { + fields: { + friendList: { + merge: (_, incoming) => incoming, + }, + }, + }, + }, + }), link: createLink({ friendList: () => stream }), incrementalHandler: new Defer20220824Handler(), }); @@ -562,11 +572,7 @@ test('does not suspend streamed queries with data in the cache and using a "cach expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); expect(snapshot).toStrictEqualTyped({ data: markAsStreaming({ - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - { __typename: "Friend", id: "2", name: "Cached Han" }, - { __typename: "Friend", id: "3", name: "Cached Leia" }, - ], + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], }), dataState: "streaming", networkStatus: NetworkStatus.streaming, @@ -689,11 +695,7 @@ test("incrementally rerenders data returned by a `refetch` for a streamed query" expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); expect(snapshot).toStrictEqualTyped({ data: markAsStreaming({ - friendList: [ - { __typename: "Friend", id: "1", name: "Luke (refetch)" }, - { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3", name: "Leia" }, - ], + friendList: [{ __typename: "Friend", id: "1", name: "Luke (refetch)" }], }), dataState: "streaming", networkStatus: NetworkStatus.streaming, @@ -1615,11 +1617,7 @@ test("can refetch and respond to cache updates after encountering an error in an expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); expect(snapshot).toStrictEqualTyped({ data: markAsStreaming({ - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - null, - { __typename: "Friend", id: "3", name: "Leia" }, - ], + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], }), dataState: "streaming", networkStatus: NetworkStatus.streaming, @@ -1638,7 +1636,6 @@ test("can refetch and respond to cache updates after encountering an error in an friendList: [ { __typename: "Friend", id: "1", name: "Luke" }, { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3", name: "Leia" }, ], }), dataState: "streaming", diff --git a/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx b/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx index 8eafae1220b..9de64a9accd 100644 --- a/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx +++ b/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx @@ -368,11 +368,6 @@ test('does not suspend streamed queries with data in the cache and using a 
"cach await expect(takeRender).not.toRerender(); }); -// TODO: Determine how we handle partial data with streamed responses. While this -// works as expected and renders correctly, this also emits missing field -// warnings in the console when writing the result to the cache since array items -// with partial cache data are still included for items that haven't streamed in -// yet. test('does not suspend streamed queries with partial data in the cache and using a "cache-first" fetch policy with `returnPartialData`', async () => { using _TODO_REMOVE_ME_AFTER_DECIDING_COMMENT = spyOnConsole("error"); const { subject, stream } = asyncIterableSubject(); @@ -386,7 +381,17 @@ test('does not suspend streamed queries with partial data in the cache and using } `; - const cache = new InMemoryCache(); + const cache = new InMemoryCache({ + typePolicies: { + Query: { + fields: { + friendList: { + merge: (_, incoming) => incoming, + }, + }, + }, + }, + }); // We are intentionally writing partial data to the cache. Supress console // warnings to avoid unnecessary noise in the test. @@ -446,11 +451,7 @@ test('does not suspend streamed queries with partial data in the cache and using expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); expect(snapshot).toStrictEqualTyped({ data: markAsStreaming({ - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - { __typename: "Friend", id: "2" }, - { __typename: "Friend", id: "3" }, - ], + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], }), dataState: "streaming", networkStatus: NetworkStatus.streaming, @@ -469,7 +470,6 @@ test('does not suspend streamed queries with partial data in the cache and using friendList: [ { __typename: "Friend", id: "1", name: "Luke" }, { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3" }, ], }), dataState: "streaming", @@ -514,7 +514,17 @@ test('does not suspend streamed queries with data in the cache and using a "cach `; const client = new ApolloClient({ - cache: new InMemoryCache(), + cache: new InMemoryCache({ + typePolicies: { + Query: { + fields: { + friendList: { + merge: (_, incoming) => incoming, + }, + }, + }, + }, + }), link: createLink({ friendList: () => stream }), incrementalHandler: new GraphQL17Alpha9Handler(), }); @@ -562,11 +572,7 @@ test('does not suspend streamed queries with data in the cache and using a "cach expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); expect(snapshot).toStrictEqualTyped({ data: markAsStreaming({ - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - { __typename: "Friend", id: "2", name: "Cached Han" }, - { __typename: "Friend", id: "3", name: "Cached Leia" }, - ], + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], }), dataState: "streaming", networkStatus: NetworkStatus.streaming, @@ -689,11 +695,7 @@ test("incrementally rerenders data returned by a `refetch` for a streamed query" expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); expect(snapshot).toStrictEqualTyped({ data: markAsStreaming({ - friendList: [ - { __typename: "Friend", id: "1", name: "Luke (refetch)" }, - { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3", name: "Leia" }, - ], + friendList: [{ __typename: "Friend", id: "1", name: "Luke (refetch)" }], }), dataState: "streaming", networkStatus: NetworkStatus.streaming, @@ -1643,11 +1645,7 @@ test("can refetch and respond to cache updates after encountering an error in an 
expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); expect(snapshot).toStrictEqualTyped({ data: markAsStreaming({ - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - null, - { __typename: "Friend", id: "3", name: "Leia" }, - ], + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], }), dataState: "streaming", networkStatus: NetworkStatus.streaming, @@ -1666,7 +1664,6 @@ test("can refetch and respond to cache updates after encountering an error in an friendList: [ { __typename: "Friend", id: "1", name: "Luke" }, { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3", name: "Leia" }, ], }), dataState: "streaming", From 5d7626a66bd34d3f78916b0bdf94a196204288a5 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Mon, 15 Sep 2025 14:24:19 -0600 Subject: [PATCH 211/254] Update spyOnConsole statement --- .../__tests__/useSuspenseQuery/streamDefer20220824.test.tsx | 3 +-- .../__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx | 3 +-- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/src/react/hooks/__tests__/useSuspenseQuery/streamDefer20220824.test.tsx b/src/react/hooks/__tests__/useSuspenseQuery/streamDefer20220824.test.tsx index 181d9b26047..4468bb2a2d2 100644 --- a/src/react/hooks/__tests__/useSuspenseQuery/streamDefer20220824.test.tsx +++ b/src/react/hooks/__tests__/useSuspenseQuery/streamDefer20220824.test.tsx @@ -369,7 +369,6 @@ test('does not suspend streamed queries with data in the cache and using a "cach }); test('does not suspend streamed queries with partial data in the cache and using a "cache-first" fetch policy with `returnPartialData`', async () => { - using _TODO_REMOVE_ME_AFTER_DECIDING_COMMENT = spyOnConsole("error"); const { subject, stream } = asyncIterableSubject(); const query = gql` @@ -396,7 +395,7 @@ test('does not suspend streamed queries with partial data in the cache and using // We are intentionally writing partial data to the cache. Supress console // warnings to avoid unnecessary noise in the test. { - // using _consoleSpy = spyOnConsole("error"); + using _consoleSpy = spyOnConsole("error"); cache.writeQuery({ query, data: { diff --git a/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx b/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx index 9de64a9accd..791d712bd25 100644 --- a/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx +++ b/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx @@ -369,7 +369,6 @@ test('does not suspend streamed queries with data in the cache and using a "cach }); test('does not suspend streamed queries with partial data in the cache and using a "cache-first" fetch policy with `returnPartialData`', async () => { - using _TODO_REMOVE_ME_AFTER_DECIDING_COMMENT = spyOnConsole("error"); const { subject, stream } = asyncIterableSubject(); const query = gql` @@ -396,7 +395,7 @@ test('does not suspend streamed queries with partial data in the cache and using // We are intentionally writing partial data to the cache. Supress console // warnings to avoid unnecessary noise in the test. 
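// Annotation (not part of this diff): this commit swaps the test-wide
// _TODO_REMOVE_ME spy for the block-scoped one below. spyOnConsole is the
// repo's disposable test helper, so a `using` declaration inside a bare block
// restores console.error as soon as the intentional partial-data write has
// run, roughly:
//
//   {
//     using _consoleSpy = spyOnConsole("error");
//     cache.writeQuery({ query, data: partialFriendList }); // warning expected here
//   } // spy disposed; unexpected warnings later in the test surface normally
//
// (partialFriendList stands in for the partial list each of these tests writes.)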
{ - // using _consoleSpy = spyOnConsole("error"); + using _consoleSpy = spyOnConsole("error"); cache.writeQuery({ query, data: { From 32ad13a026a7b7ba96827c88dac3ed307d4e5f95 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Mon, 15 Sep 2025 14:25:00 -0600 Subject: [PATCH 212/254] Remove todo in useBackgroundQuery tests --- .../useBackgroundQuery/streamDefer20220824.test.tsx | 8 +------- .../useBackgroundQuery/streamGraphQL17Alpha9.test.tsx | 8 +------- 2 files changed, 2 insertions(+), 14 deletions(-) diff --git a/src/react/hooks/__tests__/useBackgroundQuery/streamDefer20220824.test.tsx b/src/react/hooks/__tests__/useBackgroundQuery/streamDefer20220824.test.tsx index 045d8529679..bff598f1047 100644 --- a/src/react/hooks/__tests__/useBackgroundQuery/streamDefer20220824.test.tsx +++ b/src/react/hooks/__tests__/useBackgroundQuery/streamDefer20220824.test.tsx @@ -258,13 +258,7 @@ test('does not suspend deferred queries with data in the cache and using a "cach await expect(takeRender).not.toRerender(); }); -// TODO: Determine how we handle partial data with streamed responses. While this -// works as expected and renders correctly, this also emits missing field -// warnings in the console when writing the result to the cache since array items -// with partial cache data are still included for items that haven't streamed in -// yet. test('does not suspend deferred queries with partial data in the cache and using a "cache-first" fetch policy with `returnPartialData`', async () => { - using _TODO_REMOVE_ME_AFTER_DECIDING_COMMENT = spyOnConsole("error"); const { stream, subject } = asyncIterableSubject(); interface QueryData { friendList: Array<{ __typename: "Friend"; id: string; name: string }>; @@ -300,7 +294,7 @@ test('does not suspend deferred queries with partial data in the cache and using // We are intentionally writing partial data to the cache. Supress console // warnings to avoid unnecessary noise in the test. { - // using _consoleSpy = spyOnConsole("error"); + using _consoleSpy = spyOnConsole("error"); client.writeQuery({ query, data: { diff --git a/src/react/hooks/__tests__/useBackgroundQuery/streamGraphQL17Alpha9.test.tsx b/src/react/hooks/__tests__/useBackgroundQuery/streamGraphQL17Alpha9.test.tsx index f3e01cd0458..e5010152249 100644 --- a/src/react/hooks/__tests__/useBackgroundQuery/streamGraphQL17Alpha9.test.tsx +++ b/src/react/hooks/__tests__/useBackgroundQuery/streamGraphQL17Alpha9.test.tsx @@ -258,13 +258,7 @@ test('does not suspend deferred queries with data in the cache and using a "cach await expect(takeRender).not.toRerender(); }); -// TODO: Determine how we handle partial data with streamed responses. While this -// works as expected and renders correctly, this also emits missing field -// warnings in the console when writing the result to the cache since array items -// with partial cache data are still included for items that haven't streamed in -// yet. test('does not suspend deferred queries with partial data in the cache and using a "cache-first" fetch policy with `returnPartialData`', async () => { - using _TODO_REMOVE_ME_AFTER_DECIDING_COMMENT = spyOnConsole("error"); const { stream, subject } = asyncIterableSubject(); interface QueryData { friendList: Array<{ __typename: "Friend"; id: string; name: string }>; @@ -300,7 +294,7 @@ test('does not suspend deferred queries with partial data in the cache and using // We are intentionally writing partial data to the cache. Supress console // warnings to avoid unnecessary noise in the test. 
{ - // using _consoleSpy = spyOnConsole("error"); + using _consoleSpy = spyOnConsole("error"); client.writeQuery({ query, data: { From ac335784fe7c4b04ce35297b60fed40ebf24909a Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Mon, 15 Sep 2025 14:31:37 -0600 Subject: [PATCH 213/254] Update useQuery stream tests --- .../useQuery/streamDefer20220824.test.tsx | 62 ++++++++----------- .../useQuery/streamGraphQL17Alpha9.test.tsx | 62 ++++++++----------- 2 files changed, 54 insertions(+), 70 deletions(-) diff --git a/src/react/hooks/__tests__/useQuery/streamDefer20220824.test.tsx b/src/react/hooks/__tests__/useQuery/streamDefer20220824.test.tsx index 6432ce1d093..aad6b5a7618 100644 --- a/src/react/hooks/__tests__/useQuery/streamDefer20220824.test.tsx +++ b/src/react/hooks/__tests__/useQuery/streamDefer20220824.test.tsx @@ -491,7 +491,17 @@ test('returns eventually consistent data from streamed queries with data in the `; const client = new ApolloClient({ - cache: new InMemoryCache(), + cache: new InMemoryCache({ + typePolicies: { + Query: { + fields: { + friendList: { + merge: (_, incoming) => incoming, + }, + }, + }, + }, + }), link: createLink({ friendList: () => stream }), incrementalHandler: new Defer20220824Handler(), }); @@ -532,11 +542,7 @@ test('returns eventually consistent data from streamed queries with data in the await expect(takeSnapshot()).resolves.toStrictEqualTyped({ data: markAsStreaming({ - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - { __typename: "Friend", id: "2", name: "Cached Han" }, - { __typename: "Friend", id: "3", name: "Cached Leia" }, - ], + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], }), dataState: "streaming", loading: true, @@ -558,18 +564,13 @@ test('returns eventually consistent data from streamed queries with data in the friendList: [ { __typename: "Friend", id: "1", name: "Luke" }, { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3", name: "Cached Leia" }, ], }), dataState: "streaming", loading: true, networkStatus: NetworkStatus.streaming, previousData: { - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - { __typename: "Friend", id: "2", name: "Cached Han" }, - { __typename: "Friend", id: "3", name: "Cached Leia" }, - ], + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], }, variables: {}, }); @@ -592,7 +593,6 @@ test('returns eventually consistent data from streamed queries with data in the friendList: [ { __typename: "Friend", id: "1", name: "Luke" }, { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3", name: "Cached Leia" }, ], }, variables: {}, @@ -613,7 +613,6 @@ test('returns eventually consistent data from streamed queries with data in the friendList: [ { __typename: "Friend", id: "1", name: "Luke" }, { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3", name: "Cached Leia" }, ], }, variables: {}, @@ -622,13 +621,7 @@ test('returns eventually consistent data from streamed queries with data in the await expect(takeSnapshot).not.toRerender(); }); -// TODO: Determine how we handle partial data with streamed responses. While this -// works as expected and renders correctly, this also emits missing field -// warnings in the console when writing the result to the cache since array items -// with partial cache data are still included for items that haven't streamed in -// yet. 
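// Annotation (not part of this diff): the TODO removed above described the
// earlier behavior, where not-yet-streamed items from the partial cache were
// still carried along and produced missing-field warnings on cache writes.
// With the truncate array merge from the earlier commits in place, the
// mid-stream snapshots asserted below only contain items that have actually
// streamed in, for example:
//
//   before: friendList: [{ id: "1", name: "Luke" }, { id: "2" }, { id: "3" }]
//   after:  friendList: [{ id: "1", name: "Luke" }]
//
// so the warning-related TODO no longer applies.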
test('returns eventually consistent data from streamed queries with partial data in the cache and using a "cache-first" fetch policy with `returnPartialData`', async () => { - using _TODO_REMOVE_ME_AFTER_DECIDING_COMMENT = spyOnConsole("error"); const { stream, subject } = asyncIterableSubject(); const query = gql` query { @@ -640,7 +633,17 @@ test('returns eventually consistent data from streamed queries with partial data `; const client = new ApolloClient({ - cache: new InMemoryCache(), + cache: new InMemoryCache({ + typePolicies: { + Query: { + fields: { + friendList: { + merge: (_, incoming) => incoming, + }, + }, + }, + }, + }), link: createLink({ friendList: () => stream }), incrementalHandler: new Defer20220824Handler(), }); @@ -648,7 +651,7 @@ test('returns eventually consistent data from streamed queries with partial data // We know we are writing partial data to the cache so suppress the console // warning. { - // using _consoleSpy = spyOnConsole("error"); + using _consoleSpy = spyOnConsole("error"); client.writeQuery({ query, data: { @@ -692,11 +695,7 @@ test('returns eventually consistent data from streamed queries with partial data await expect(takeSnapshot()).resolves.toStrictEqualTyped({ data: markAsStreaming({ - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - { __typename: "Friend", id: "2" }, - { __typename: "Friend", id: "3" }, - ], + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], }), dataState: "streaming", loading: true, @@ -718,18 +717,13 @@ test('returns eventually consistent data from streamed queries with partial data friendList: [ { __typename: "Friend", id: "1", name: "Luke" }, { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3" }, ], }), dataState: "streaming", loading: true, networkStatus: NetworkStatus.streaming, previousData: { - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - { __typename: "Friend", id: "2" }, - { __typename: "Friend", id: "3" }, - ], + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], }, variables: {}, }); @@ -752,7 +746,6 @@ test('returns eventually consistent data from streamed queries with partial data friendList: [ { __typename: "Friend", id: "1", name: "Luke" }, { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3" }, ], }, variables: {}, @@ -773,7 +766,6 @@ test('returns eventually consistent data from streamed queries with partial data friendList: [ { __typename: "Friend", id: "1", name: "Luke" }, { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3" }, ], }, variables: {}, diff --git a/src/react/hooks/__tests__/useQuery/streamGraphQL17Alpha9.test.tsx b/src/react/hooks/__tests__/useQuery/streamGraphQL17Alpha9.test.tsx index ab0e022c798..28a65e677f0 100644 --- a/src/react/hooks/__tests__/useQuery/streamGraphQL17Alpha9.test.tsx +++ b/src/react/hooks/__tests__/useQuery/streamGraphQL17Alpha9.test.tsx @@ -491,7 +491,17 @@ test('returns eventually consistent data from streamed queries with data in the `; const client = new ApolloClient({ - cache: new InMemoryCache(), + cache: new InMemoryCache({ + typePolicies: { + Query: { + fields: { + friendList: { + merge: (_, incoming) => incoming, + }, + }, + }, + }, + }), link: createLink({ friendList: () => stream }), incrementalHandler: new GraphQL17Alpha9Handler(), }); @@ -532,11 +542,7 @@ test('returns eventually consistent data from streamed queries with data in the await expect(takeSnapshot()).resolves.toStrictEqualTyped({ data: 
markAsStreaming({ - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - { __typename: "Friend", id: "2", name: "Cached Han" }, - { __typename: "Friend", id: "3", name: "Cached Leia" }, - ], + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], }), dataState: "streaming", loading: true, @@ -558,18 +564,13 @@ test('returns eventually consistent data from streamed queries with data in the friendList: [ { __typename: "Friend", id: "1", name: "Luke" }, { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3", name: "Cached Leia" }, ], }), dataState: "streaming", loading: true, networkStatus: NetworkStatus.streaming, previousData: { - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - { __typename: "Friend", id: "2", name: "Cached Han" }, - { __typename: "Friend", id: "3", name: "Cached Leia" }, - ], + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], }, variables: {}, }); @@ -592,7 +593,6 @@ test('returns eventually consistent data from streamed queries with data in the friendList: [ { __typename: "Friend", id: "1", name: "Luke" }, { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3", name: "Cached Leia" }, ], }, variables: {}, @@ -613,7 +613,6 @@ test('returns eventually consistent data from streamed queries with data in the friendList: [ { __typename: "Friend", id: "1", name: "Luke" }, { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3", name: "Cached Leia" }, ], }, variables: {}, @@ -622,13 +621,7 @@ test('returns eventually consistent data from streamed queries with data in the await expect(takeSnapshot).not.toRerender(); }); -// TODO: Determine how we handle partial data with streamed responses. While this -// works as expected and renders correctly, this also emits missing field -// warnings in the console when writing the result to the cache since array items -// with partial cache data are still included for items that haven't streamed in -// yet. test('returns eventually consistent data from streamed queries with partial data in the cache and using a "cache-first" fetch policy with `returnPartialData`', async () => { - using _TODO_REMOVE_ME_AFTER_DECIDING_COMMENT = spyOnConsole("error"); const { stream, subject } = asyncIterableSubject(); const query = gql` query { @@ -640,7 +633,17 @@ test('returns eventually consistent data from streamed queries with partial data `; const client = new ApolloClient({ - cache: new InMemoryCache(), + cache: new InMemoryCache({ + typePolicies: { + Query: { + fields: { + friendList: { + merge: (_, incoming) => incoming, + }, + }, + }, + }, + }), link: createLink({ friendList: () => stream }), incrementalHandler: new GraphQL17Alpha9Handler(), }); @@ -648,7 +651,7 @@ test('returns eventually consistent data from streamed queries with partial data // We know we are writing partial data to the cache so suppress the console // warning. 
{ - // using _consoleSpy = spyOnConsole("error"); + using _consoleSpy = spyOnConsole("error"); client.writeQuery({ query, data: { @@ -692,11 +695,7 @@ test('returns eventually consistent data from streamed queries with partial data await expect(takeSnapshot()).resolves.toStrictEqualTyped({ data: markAsStreaming({ - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - { __typename: "Friend", id: "2" }, - { __typename: "Friend", id: "3" }, - ], + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], }), dataState: "streaming", loading: true, @@ -718,18 +717,13 @@ test('returns eventually consistent data from streamed queries with partial data friendList: [ { __typename: "Friend", id: "1", name: "Luke" }, { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3" }, ], }), dataState: "streaming", loading: true, networkStatus: NetworkStatus.streaming, previousData: { - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - { __typename: "Friend", id: "2" }, - { __typename: "Friend", id: "3" }, - ], + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], }, variables: {}, }); @@ -752,7 +746,6 @@ test('returns eventually consistent data from streamed queries with partial data friendList: [ { __typename: "Friend", id: "1", name: "Luke" }, { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3" }, ], }, variables: {}, @@ -773,7 +766,6 @@ test('returns eventually consistent data from streamed queries with partial data friendList: [ { __typename: "Friend", id: "1", name: "Luke" }, { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3" }, ], }, variables: {}, From 3715a17acbf9d713721261b11fc1799c0a650df1 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Mon, 15 Sep 2025 14:33:55 -0600 Subject: [PATCH 214/254] Remove unneeded arg --- .../__tests__/client.watchQuery/streamGraphQL17Alpha9.test.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/core/__tests__/client.watchQuery/streamGraphQL17Alpha9.test.ts b/src/core/__tests__/client.watchQuery/streamGraphQL17Alpha9.test.ts index bdcd108a54e..871ec3da62a 100644 --- a/src/core/__tests__/client.watchQuery/streamGraphQL17Alpha9.test.ts +++ b/src/core/__tests__/client.watchQuery/streamGraphQL17Alpha9.test.ts @@ -792,7 +792,7 @@ test("handles @defer inside @stream", async () => { const { promise: iterableCompletionPromise, resolve: resolveIterableCompletion, - } = promiseWithResolvers(); + } = promiseWithResolvers(); const client = new ApolloClient({ link: createLink({ @@ -841,7 +841,7 @@ test("handles @defer inside @stream", async () => { partial: true, }); - resolveIterableCompletion(null); + resolveIterableCompletion(); await expect(observableStream).toEmitSimilarValue({ expected: (previous) => ({ From f9e176b3b3c8cd88e3035a37d1e96796077db0c9 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Mon, 15 Sep 2025 14:51:21 -0600 Subject: [PATCH 215/254] Add test to ensure custom merge function can be used to combine cached/streamed lists --- .../streamDefer20220824.test.ts | 108 ++++++++++++++++ .../streamGraphQL17Alpha9.test.ts | 122 ++++++++++++++++++ 2 files changed, 230 insertions(+) diff --git a/src/core/__tests__/client.watchQuery/streamDefer20220824.test.ts b/src/core/__tests__/client.watchQuery/streamDefer20220824.test.ts index 01ab8f1f78a..8216613d93a 100644 --- a/src/core/__tests__/client.watchQuery/streamDefer20220824.test.ts +++ b/src/core/__tests__/client.watchQuery/streamDefer20220824.test.ts @@ -17,6 
+17,7 @@ import { ObservableStream, promiseWithResolvers, } from "@apollo/client/testing/internal"; +import { hasDirectives } from "@apollo/client/utilities/internal"; const friends = [ { name: "Luke", id: 1 }, @@ -879,3 +880,110 @@ test("handles @defer inside @stream", async () => { await expect(observableStream).not.toEmitAnything(); }); + +test("can use custom merge function to combine cached and streamed lists", async () => { + const cache = new InMemoryCache({ + typePolicies: { + Query: { + fields: { + friendList: { + merge: (existing = [], incoming, { field }) => { + if (field && hasDirectives(["stream"], field)) { + const merged: any[] = []; + + for ( + let i = 0; + i < Math.max(existing.length, incoming.length); + i++ + ) { + merged[i] = + incoming[i] === undefined ? existing[i] : incoming[i]; + } + + return merged; + } + + return incoming; + }, + }, + }, + }, + }, + }); + + const client = new ApolloClient({ + link: createLink({ + friendList: () => friends.map((friend) => Promise.resolve(friend)), + }), + cache, + incrementalHandler: new Defer20220824Handler(), + }); + + const query = gql` + query { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + client.writeQuery({ + query, + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Cached Luke" }, + { __typename: "Friend", id: "2", name: "Cached Han" }, + { __typename: "Friend", id: "3", name: "Cached Leia" }, + ], + }, + }); + + const stream = new ObservableStream( + client.watchQuery({ query, fetchPolicy: "cache-and-network" }) + ); + + await expect(stream).toEmitTypedValue({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Cached Luke" }, + { __typename: "Friend", id: "2", name: "Cached Han" }, + { __typename: "Friend", id: "3", name: "Cached Leia" }, + ], + }, + dataState: "complete", + loading: true, + networkStatus: NetworkStatus.loading, + partial: false, + }); + + await expect(stream).toEmitTypedValue({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Cached Han" }, + { __typename: "Friend", id: "3", name: "Cached Leia" }, + ], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + await expect(stream).toEmitTypedValue({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }), + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); + + await expect(stream).not.toEmitAnything(); +}); diff --git a/src/core/__tests__/client.watchQuery/streamGraphQL17Alpha9.test.ts b/src/core/__tests__/client.watchQuery/streamGraphQL17Alpha9.test.ts index 871ec3da62a..fe33f498d34 100644 --- a/src/core/__tests__/client.watchQuery/streamGraphQL17Alpha9.test.ts +++ b/src/core/__tests__/client.watchQuery/streamGraphQL17Alpha9.test.ts @@ -17,6 +17,7 @@ import { ObservableStream, promiseWithResolvers, } from "@apollo/client/testing/internal"; +import { hasDirectives } from "@apollo/client/utilities/internal"; const friends = [ { name: "Luke", id: 1 }, @@ -883,3 +884,124 @@ test("handles @defer inside @stream", async () => { await expect(observableStream).not.toEmitAnything(); }); + +test("can use custom merge function to combine cached and streamed lists", async () => { + const cache = new InMemoryCache({ + typePolicies: { + Query: { + fields: { + friendList: { 
+ merge: (existing = [], incoming, { field }) => { + if (field && hasDirectives(["stream"], field)) { + const merged: any[] = []; + + for ( + let i = 0; + i < Math.max(existing.length, incoming.length); + i++ + ) { + merged[i] = + incoming[i] === undefined ? existing[i] : incoming[i]; + } + + return merged; + } + + return incoming; + }, + }, + }, + }, + }, + }); + + const client = new ApolloClient({ + link: createLink({ + friendList: () => friends.map((friend) => Promise.resolve(friend)), + }), + cache, + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + const query = gql` + query { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + client.writeQuery({ + query, + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Cached Luke" }, + { __typename: "Friend", id: "2", name: "Cached Han" }, + { __typename: "Friend", id: "3", name: "Cached Leia" }, + ], + }, + }); + + const stream = new ObservableStream( + client.watchQuery({ query, fetchPolicy: "cache-and-network" }) + ); + + await expect(stream).toEmitTypedValue({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Cached Luke" }, + { __typename: "Friend", id: "2", name: "Cached Han" }, + { __typename: "Friend", id: "3", name: "Cached Leia" }, + ], + }, + dataState: "complete", + loading: true, + networkStatus: NetworkStatus.loading, + partial: false, + }); + + await expect(stream).toEmitTypedValue({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Cached Han" }, + { __typename: "Friend", id: "3", name: "Cached Leia" }, + ], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + await expect(stream).toEmitTypedValue({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Cached Leia" }, + ], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + await expect(stream).toEmitTypedValue({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); + + await expect(stream).not.toEmitAnything(); +}); From 2aa31c718155e88814551afb14fd7a0035acc57d Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Mon, 15 Sep 2025 15:26:09 -0600 Subject: [PATCH 216/254] Add changeset --- .changeset/cold-kiwis-give.md | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 .changeset/cold-kiwis-give.md diff --git a/.changeset/cold-kiwis-give.md b/.changeset/cold-kiwis-give.md new file mode 100644 index 00000000000..435e5a89751 --- /dev/null +++ b/.changeset/cold-kiwis-give.md @@ -0,0 +1,5 @@ +--- +"@apollo/client": major +--- + +Fix an issue where deferred payloads that reteurned arrays with fewer items than the original cached array would retain items from the cached array. This change includes `@stream` arrays where stream arrays replace the cached arrays. 
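For reference, the cache configuration exercised by the two tests above reduces to a positional merge of the cached and streamed lists. A minimal standalone sketch (not part of the patch series), assuming the internal `hasDirectives` helper remains importable from `@apollo/client/utilities/internal` as it is in the tests:

import { InMemoryCache } from "@apollo/client";
import { hasDirectives } from "@apollo/client/utilities/internal";

const cache = new InMemoryCache({
  typePolicies: {
    Query: {
      fields: {
        // `friendList` mirrors the field name used in the tests above.
        friendList: {
          merge: (existing = [], incoming, { field }) => {
            // For @stream responses, keep cached items positionally until a
            // streamed item arrives to replace them.
            if (field && hasDirectives(["stream"], field)) {
              const merged: any[] = [];
              for (
                let i = 0;
                i < Math.max(existing.length, incoming.length);
                i++
              ) {
                merged[i] =
                  incoming[i] === undefined ? existing[i] : incoming[i];
              }
              return merged;
            }
            // Any other write replaces the list outright.
            return incoming;
          },
        },
      },
    },
  },
});

Without a merge function like this, the streamed array now replaces the cached array, which is the behavior change described in the changeset above.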
From 6dddd33d6de7b5ba86f74e248b67ff3ffe46a125 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Mon, 15 Sep 2025 15:27:19 -0600 Subject: [PATCH 217/254] Update size limits --- .size-limits.json | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.size-limits.json b/.size-limits.json index e4f01b44776..97c94ef5038 100644 --- a/.size-limits.json +++ b/.size-limits.json @@ -1,6 +1,6 @@ { - "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (CJS)": 44194, - "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production) (CJS)": 39041, - "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\"": 33526, - "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production)": 27519 + "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (CJS)": 44386, + "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production) (CJS)": 39203, + "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\"": 33554, + "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production)": 27582 } From 1780c728012b8913f6b31f6543849d096743dcf9 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Mon, 15 Sep 2025 15:59:57 -0600 Subject: [PATCH 218/254] Add tests for refetches with defer arrays --- .../client.watchQuery/defer20220824.test.ts | 184 ++++++++++++++++ .../deferGraphQL17Alpha9.test.ts | 196 ++++++++++++++++++ 2 files changed, 380 insertions(+) diff --git a/src/core/__tests__/client.watchQuery/defer20220824.test.ts b/src/core/__tests__/client.watchQuery/defer20220824.test.ts index 36c6ba5b8bb..d8934c96ea5 100644 --- a/src/core/__tests__/client.watchQuery/defer20220824.test.ts +++ b/src/core/__tests__/client.watchQuery/defer20220824.test.ts @@ -6,6 +6,7 @@ import { InMemoryCache } from "@apollo/client/cache"; import { Defer20220824Handler } from "@apollo/client/incremental"; import { ApolloLink } from "@apollo/client/link"; import { + markAsStreaming, mockDefer20220824, ObservableStream, } from "@apollo/client/testing/internal"; @@ -163,3 +164,186 @@ test("deduplicates queries as long as a query still has deferred chunks", async // expect(query5).not.toEmitAnything(); expect(outgoingRequestSpy).toHaveBeenCalledTimes(2); }); + +it.each([["cache-first"], ["no-cache"]] as const)( + "correctly merges deleted rows when receiving a deferred payload", + async (fetchPolicy) => { + const query = gql` + query Characters { + characters { + id + uppercase + ... 
@defer { + lowercase + } + } + } + `; + + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDefer20220824(); + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: httpLink, + incrementalHandler: new Defer20220824Handler(), + }); + + const observable = client.watchQuery({ query, fetchPolicy }); + const stream = new ObservableStream(observable); + + await expect(stream).toEmitTypedValue({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + partial: true, + }); + + enqueueInitialChunk({ + data: { + characters: [ + { __typename: "Character", id: 1, uppercase: "A" }, + { __typename: "Character", id: 2, uppercase: "B" }, + { __typename: "Character", id: 3, uppercase: "C" }, + ], + }, + hasNext: true, + }); + + await expect(stream).toEmitTypedValue({ + data: markAsStreaming({ + characters: [ + { __typename: "Character", id: 1, uppercase: "A" }, + { __typename: "Character", id: 2, uppercase: "B" }, + { __typename: "Character", id: 3, uppercase: "C" }, + ], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + enqueueSubsequentChunk({ + incremental: [{ data: { lowercase: "a" }, path: ["characters", 0] }], + hasNext: true, + }); + + await expect(stream).toEmitTypedValue({ + data: markAsStreaming({ + characters: [ + { __typename: "Character", id: 1, uppercase: "A", lowercase: "a" }, + { __typename: "Character", id: 2, uppercase: "B" }, + { __typename: "Character", id: 3, uppercase: "C" }, + ], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + enqueueSubsequentChunk({ + incremental: [ + { data: { lowercase: "b" }, path: ["characters", 1] }, + { data: { lowercase: "c" }, path: ["characters", 2] }, + ], + hasNext: false, + }); + + await expect(stream).toEmitTypedValue({ + data: { + characters: [ + { __typename: "Character", id: 1, uppercase: "A", lowercase: "a" }, + { __typename: "Character", id: 2, uppercase: "B", lowercase: "b" }, + { __typename: "Character", id: 3, uppercase: "C", lowercase: "c" }, + ], + }, + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); + + void observable.refetch(); + + await expect(stream).toEmitTypedValue({ + data: { + characters: [ + { __typename: "Character", id: 1, uppercase: "A", lowercase: "a" }, + { __typename: "Character", id: 2, uppercase: "B", lowercase: "b" }, + { __typename: "Character", id: 3, uppercase: "C", lowercase: "c" }, + ], + }, + dataState: "complete", + loading: true, + networkStatus: NetworkStatus.refetch, + partial: false, + }); + + // on refetch, the list is shorter + enqueueInitialChunk({ + data: { + characters: [ + { __typename: "Character", id: 1, uppercase: "A" }, + { __typename: "Character", id: 2, uppercase: "B" }, + ], + }, + hasNext: true, + }); + + await expect(stream).toEmitTypedValue({ + data: markAsStreaming({ + characters: + // no-cache fetch policy doesn't merge with existing cache data, so + // the lowercase field is not added to each item + fetchPolicy === "no-cache" ? 
+ [ + { __typename: "Character", id: 1, uppercase: "A" }, + { __typename: "Character", id: 2, uppercase: "B" }, + ] + : [ + { + __typename: "Character", + id: 1, + uppercase: "A", + lowercase: "a", + }, + { + __typename: "Character", + id: 2, + uppercase: "B", + lowercase: "b", + }, + ], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + enqueueSubsequentChunk({ + incremental: [ + { data: { lowercase: "a" }, path: ["characters", 0] }, + { data: { lowercase: "b" }, path: ["characters", 1] }, + ], + hasNext: false, + }); + + await expect(stream).toEmitTypedValue({ + data: { + characters: [ + { __typename: "Character", id: 1, uppercase: "A", lowercase: "a" }, + { __typename: "Character", id: 2, uppercase: "B", lowercase: "b" }, + ], + }, + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); + + await expect(stream).not.toEmitAnything(); + } +); diff --git a/src/core/__tests__/client.watchQuery/deferGraphQL17Alpha9.test.ts b/src/core/__tests__/client.watchQuery/deferGraphQL17Alpha9.test.ts index 035ce0525df..5464258a417 100644 --- a/src/core/__tests__/client.watchQuery/deferGraphQL17Alpha9.test.ts +++ b/src/core/__tests__/client.watchQuery/deferGraphQL17Alpha9.test.ts @@ -6,6 +6,7 @@ import { InMemoryCache } from "@apollo/client/cache"; import { GraphQL17Alpha9Handler } from "@apollo/client/incremental"; import { ApolloLink } from "@apollo/client/link"; import { + markAsStreaming, mockDeferStreamGraphQL17Alpha9, ObservableStream, } from "@apollo/client/testing/internal"; @@ -173,3 +174,198 @@ test("deduplicates queries as long as a query still has deferred chunks", async // expect(query5).not.toEmitAnything(); expect(outgoingRequestSpy).toHaveBeenCalledTimes(2); }); + +it.each([["cache-first"], ["no-cache"]] as const)( + "correctly merges deleted rows when receiving a deferred payload", + async (fetchPolicy) => { + const query = gql` + query Characters { + characters { + id + uppercase + ... 
@defer { + lowercase + } + } + } + `; + + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDeferStreamGraphQL17Alpha9(); + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: httpLink, + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + const observable = client.watchQuery({ query, fetchPolicy }); + const stream = new ObservableStream(observable); + + await expect(stream).toEmitTypedValue({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + partial: true, + }); + + enqueueInitialChunk({ + data: { + characters: [ + { __typename: "Character", id: 1, uppercase: "A" }, + { __typename: "Character", id: 2, uppercase: "B" }, + { __typename: "Character", id: 3, uppercase: "C" }, + ], + }, + pending: [ + { id: "0", path: ["characters", 0] }, + { id: "1", path: ["characters", 1] }, + { id: "2", path: ["characters", 2] }, + ], + hasNext: true, + }); + + await expect(stream).toEmitTypedValue({ + data: markAsStreaming({ + characters: [ + { __typename: "Character", id: 1, uppercase: "A" }, + { __typename: "Character", id: 2, uppercase: "B" }, + { __typename: "Character", id: 3, uppercase: "C" }, + ], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + enqueueSubsequentChunk({ + incremental: [{ data: { lowercase: "a" }, id: "0" }], + completed: [{ id: "0" }], + hasNext: true, + }); + + await expect(stream).toEmitTypedValue({ + data: markAsStreaming({ + characters: [ + { __typename: "Character", id: 1, uppercase: "A", lowercase: "a" }, + { __typename: "Character", id: 2, uppercase: "B" }, + { __typename: "Character", id: 3, uppercase: "C" }, + ], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + enqueueSubsequentChunk({ + incremental: [ + { data: { lowercase: "b" }, id: "1" }, + { data: { lowercase: "c" }, id: "2" }, + ], + completed: [{ id: "1" }, { id: "2" }], + hasNext: false, + }); + + await expect(stream).toEmitTypedValue({ + data: { + characters: [ + { __typename: "Character", id: 1, uppercase: "A", lowercase: "a" }, + { __typename: "Character", id: 2, uppercase: "B", lowercase: "b" }, + { __typename: "Character", id: 3, uppercase: "C", lowercase: "c" }, + ], + }, + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); + + void observable.refetch(); + + await expect(stream).toEmitTypedValue({ + data: { + characters: [ + { __typename: "Character", id: 1, uppercase: "A", lowercase: "a" }, + { __typename: "Character", id: 2, uppercase: "B", lowercase: "b" }, + { __typename: "Character", id: 3, uppercase: "C", lowercase: "c" }, + ], + }, + dataState: "complete", + loading: true, + networkStatus: NetworkStatus.refetch, + partial: false, + }); + + // on refetch, the list is shorter + enqueueInitialChunk({ + data: { + characters: [ + { __typename: "Character", id: 1, uppercase: "A" }, + { __typename: "Character", id: 2, uppercase: "B" }, + ], + }, + pending: [ + { id: "0", path: ["characters", 0] }, + { id: "1", path: ["characters", 1] }, + ], + hasNext: true, + }); + + await expect(stream).toEmitTypedValue({ + data: markAsStreaming({ + characters: + // no-cache fetch policy doesn't merge with existing cache data, so + // the lowercase field is not available in the refetch + fetchPolicy === "no-cache" ? 
+ [ + { __typename: "Character", id: 1, uppercase: "A" }, + { __typename: "Character", id: 2, uppercase: "B" }, + ] + : [ + { + __typename: "Character", + id: 1, + uppercase: "A", + lowercase: "a", + }, + { + __typename: "Character", + id: 2, + uppercase: "B", + lowercase: "b", + }, + ], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + enqueueSubsequentChunk({ + incremental: [ + { data: { lowercase: "a" }, id: "0" }, + { data: { lowercase: "b" }, id: "1" }, + ], + completed: [{ id: "0" }, { id: "1" }], + hasNext: false, + }); + + await expect(stream).toEmitTypedValue({ + data: { + characters: [ + { __typename: "Character", id: 1, uppercase: "A", lowercase: "a" }, + { __typename: "Character", id: 2, uppercase: "B", lowercase: "b" }, + ], + }, + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); + + await expect(stream).not.toEmitAnything(); + } +); From f63c2b2bb207d332ccc2448681c63f589621ee09 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Mon, 15 Sep 2025 16:06:19 -0600 Subject: [PATCH 219/254] Fix changeset version type --- .changeset/cold-kiwis-give.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.changeset/cold-kiwis-give.md b/.changeset/cold-kiwis-give.md index 435e5a89751..880998840fe 100644 --- a/.changeset/cold-kiwis-give.md +++ b/.changeset/cold-kiwis-give.md @@ -1,5 +1,5 @@ --- -"@apollo/client": major +"@apollo/client": minor --- Fix an issue where deferred payloads that reteurned arrays with fewer items than the original cached array would retain items from the cached array. This change includes `@stream` arrays where stream arrays replace the cached arrays. From dc712e74ae6a55965dc0d1f308ffeb100586a2b9 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Mon, 15 Sep 2025 16:31:00 -0600 Subject: [PATCH 220/254] Print object as array in warning --- src/cache/inmemory/writeToStore.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/cache/inmemory/writeToStore.ts b/src/cache/inmemory/writeToStore.ts index b44b6eb02f6..d852dc06248 100644 --- a/src/cache/inmemory/writeToStore.ts +++ b/src/cache/inmemory/writeToStore.ts @@ -894,8 +894,8 @@ For more information about these options, please refer to the documentation: " have an ID or a custom merge function, or " : "", typeDotName, - { ...existing }, - { ...incoming } + Array.isArray(existing) ? [...existing] : { ...existing }, + Array.isArray(incoming) ? [...incoming] : { ...incoming } ); } From 01cace0a6d4faf79e8a4188b93c7d13c4b26d6d4 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Mon, 15 Sep 2025 16:32:18 -0600 Subject: [PATCH 221/254] Add changeset --- .changeset/neat-lemons-shave.md | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 .changeset/neat-lemons-shave.md diff --git a/.changeset/neat-lemons-shave.md b/.changeset/neat-lemons-shave.md new file mode 100644 index 00000000000..d7357691800 --- /dev/null +++ b/.changeset/neat-lemons-shave.md @@ -0,0 +1,5 @@ +--- +"@apollo/client": patch +--- + +Improve the cache data loss warning message when `existing` or `incoming` is an array. 
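The warning change in the two patches above exists because spreading an array into an object literal drops the array shape, so the previous log output rendered cached lists as index-keyed objects. In plain JavaScript, with hypothetical values:

const existing = [{ id: "1" }, { id: "2" }];

console.log({ ...existing }); // previous output shape: { "0": { id: "1" }, "1": { id: "2" } }
console.log([...existing]);   // new output shape: [ { id: "1" }, { id: "2" } ]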
From 31e405e10c3b9f740e7384bc09e17dfaf00ad0ea Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Mon, 15 Sep 2025 16:35:26 -0600 Subject: [PATCH 222/254] Update api report --- .../api-report-utilities_internal.api.md | 19 +++++++++++++++---- 1 file changed, 15 insertions(+), 4 deletions(-) diff --git a/.api-reports/api-report-utilities_internal.api.md b/.api-reports/api-report-utilities_internal.api.md index d1055bf60ca..56003dffba2 100644 --- a/.api-reports/api-report-utilities_internal.api.md +++ b/.api-reports/api-report-utilities_internal.api.md @@ -99,10 +99,23 @@ export type DecoratedPromise = PendingPromise | FulfilledPromise // @internal @deprecated (undocumented) export function decoratePromise(promise: Promise): DecoratedPromise; +// Warning: (ae-internal-mixed-release-tag) Mixed release tags are not allowed for "DeepMerger" because one of its declarations is marked as @internal +// +// @public (undocumented) +export namespace DeepMerger { + // (undocumented) + export type ArrayMergeStrategy = "truncate" | "combine"; + // (undocumented) + export interface Options { + // (undocumented) + arrayMerge?: DeepMerger.ArrayMergeStrategy; + } +} + // @internal @deprecated (undocumented) -export class DeepMerger { +export class DeepMerger { // Warning: (ae-forgotten-export) The symbol "ReconcilerFunction" needs to be exported by the entry point index.d.ts - constructor(reconciler?: ReconcilerFunction); + constructor(reconciler?: ReconcilerFunction, options?: DeepMerger.Options); // (undocumented) isObject: typeof isNonNullObject; // (undocumented) @@ -381,8 +394,6 @@ export function preventUnhandledRejection(promise: Promise): Promise; // @internal @deprecated (undocumented) export type Primitive = null | undefined | string | number | boolean | symbol | bigint; -// Warning: (ae-incompatible-release-tags) The symbol "ReconcilerFunction" is marked as @public, but its signature references "DeepMerger" which is marked as @internal -// // @public (undocumented) type ReconcilerFunction = (this: DeepMerger, target: Record, source: Record, property: string | number, ...context: TContextArgs) => any; From c496039df72de324777bc0976fae32fb2e31533a Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Mon, 15 Sep 2025 16:42:54 -0600 Subject: [PATCH 223/254] Fix tag mix in api report --- .api-reports/api-report-utilities_internal.api.md | 6 +++--- src/utilities/internal/DeepMerger.ts | 1 + 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/.api-reports/api-report-utilities_internal.api.md b/.api-reports/api-report-utilities_internal.api.md index 56003dffba2..790b197514d 100644 --- a/.api-reports/api-report-utilities_internal.api.md +++ b/.api-reports/api-report-utilities_internal.api.md @@ -99,9 +99,7 @@ export type DecoratedPromise = PendingPromise | FulfilledPromise // @internal @deprecated (undocumented) export function decoratePromise(promise: Promise): DecoratedPromise; -// Warning: (ae-internal-mixed-release-tag) Mixed release tags are not allowed for "DeepMerger" because one of its declarations is marked as @internal -// -// @public (undocumented) +// @internal @deprecated (undocumented) export namespace DeepMerger { // (undocumented) export type ArrayMergeStrategy = "truncate" | "combine"; @@ -394,6 +392,8 @@ export function preventUnhandledRejection(promise: Promise): Promise; // @internal @deprecated (undocumented) export type Primitive = null | undefined | string | number | boolean | symbol | bigint; +// Warning: (ae-incompatible-release-tags) The symbol "ReconcilerFunction" is marked 
as @public, but its signature references "DeepMerger" which is marked as @internal +// // @public (undocumented) type ReconcilerFunction = (this: DeepMerger, target: Record, source: Record, property: string | number, ...context: TContextArgs) => any; diff --git a/src/utilities/internal/DeepMerger.ts b/src/utilities/internal/DeepMerger.ts index 5f3425e8711..91d28780387 100644 --- a/src/utilities/internal/DeepMerger.ts +++ b/src/utilities/internal/DeepMerger.ts @@ -18,6 +18,7 @@ const defaultReconciler: ReconcilerFunction = function ( return this.merge(target[property], source[property]); }; +/** @internal */ export declare namespace DeepMerger { export interface Options { arrayMerge?: DeepMerger.ArrayMergeStrategy; From 3cb80fcddf14ef86f8fccf2b4a33b76dec79d954 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 16 Sep 2025 10:48:57 -0600 Subject: [PATCH 224/254] Turn off non tracking renders --- .../__tests__/useSuspenseQuery/deferGraphQL17Alpha9.test.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/react/hooks/__tests__/useSuspenseQuery/deferGraphQL17Alpha9.test.tsx b/src/react/hooks/__tests__/useSuspenseQuery/deferGraphQL17Alpha9.test.tsx index 51770ed06e9..3e7fc588b12 100644 --- a/src/react/hooks/__tests__/useSuspenseQuery/deferGraphQL17Alpha9.test.tsx +++ b/src/react/hooks/__tests__/useSuspenseQuery/deferGraphQL17Alpha9.test.tsx @@ -74,7 +74,7 @@ async function renderSuspenseHook< const { render, takeRender, replaceSnapshot, getCurrentRender } = createRenderStream< useSuspenseQuery.Result | { error: ErrorLike } - >(); + >({ skipNonTrackingRenders: true }); const utils = await render(, options); From 2179c8f3797e4b1b47a799de6cbb9a167e3fe5fe Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 16 Sep 2025 10:49:17 -0600 Subject: [PATCH 225/254] Remove duplicate test and enable failing test --- .../deferGraphQL17Alpha9.test.tsx | 298 ++---------------- 1 file changed, 29 insertions(+), 269 deletions(-) diff --git a/src/react/hooks/__tests__/useSuspenseQuery/deferGraphQL17Alpha9.test.tsx b/src/react/hooks/__tests__/useSuspenseQuery/deferGraphQL17Alpha9.test.tsx index 3e7fc588b12..e156e40ef29 100644 --- a/src/react/hooks/__tests__/useSuspenseQuery/deferGraphQL17Alpha9.test.tsx +++ b/src/react/hooks/__tests__/useSuspenseQuery/deferGraphQL17Alpha9.test.tsx @@ -24,7 +24,6 @@ import { markAsStreaming, mockDeferStreamGraphQL17Alpha9, spyOnConsole, - wait, } from "@apollo/client/testing/internal"; import { offsetLimitPagination } from "@apollo/client/utilities"; import { invariant } from "@apollo/client/utilities/invariant"; @@ -1196,18 +1195,8 @@ test("incrementally renders data returned after skipping a deferred query", asyn await expect(takeRender).not.toRerender(); }); -// TODO: This test is a bit of a lie. `fetchMore` should incrementally -// rerender when using `@defer` but there is currently a bug in the core -// implementation that prevents updates until the final result is returned. -// This test reflects the behavior as it exists today, but will need -// to be updated once the core bug is fixed. -// -// NOTE: A duplicate it.failng test has been added right below this one with -// the expected behavior added in (i.e. the commented code in this test). Once -// the core bug is fixed, this test can be removed in favor of the other test. 
-// // https://github.com/apollographql/apollo-client/issues/11034 -test("rerenders data returned by `fetchMore` for a deferred query", async () => { +it("incrementally rerenders data returned by a `fetchMore` for a deferred query", async () => { const query = gql` query ($offset: Int) { greetings(offset: $offset) { @@ -1243,9 +1232,7 @@ test("rerenders data returned by `fetchMore` for a deferred query", async () => using _disabledAct = disableActEnvironment(); const { takeRender, getCurrentSnapshot } = await renderSuspenseHook( () => useSuspenseQuery(query, { variables: { offset: 0 } }), - { - wrapper: createClientWrapper(client), - } + { wrapper: createClientWrapper(client) } ); { @@ -1335,35 +1322,33 @@ test("rerenders data returned by `fetchMore` for a deferred query", async () => hasNext: true, }); - // TODO: Re-enable once the core bug is fixed - // { - // const { snapshot, renderedComponents } = await takeRender(); - // - // expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); - // expect(snapshot).toStrictEqualTyped({ - // data: markAsStreaming({ - // greetings: [ - // { - // __typename: "Greeting", - // message: "Hello world", - // recipient: { - // __typename: "Person", - // name: "Alice", - // }, - // }, - // { - // __typename: "Greeting", - // message: "Goodbye", - // }, - // ], - // }), - // dataState: "streaming", - // networkStatus: NetworkStatus.streaming, - // error: undefined, - // }); - // } - - await wait(0); + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + greetings: [ + { + __typename: "Greeting", + message: "Hello world", + recipient: { + __typename: "Person", + name: "Alice", + }, + }, + { + __typename: "Greeting", + message: "Goodbye", + }, + ], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + enqueueSubsequentChunk({ incremental: [ { @@ -1426,231 +1411,6 @@ test("rerenders data returned by `fetchMore` for a deferred query", async () => await expect(takeRender).not.toRerender(); }); -// TODO: This is a duplicate of the test above, but with the expected behavior -// added (hence the `it.failing`). Remove the previous test once issue #11034 -// is fixed. -// -// https://github.com/apollographql/apollo-client/issues/11034 -it.failing( - "incrementally rerenders data returned by a `fetchMore` for a deferred query", - async () => { - const query = gql` - query ($offset: Int) { - greetings(offset: $offset) { - message - ... 
@defer { - recipient { - name - } - } - } - } - `; - - const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = - mockDeferStreamGraphQL17Alpha9(); - - const cache = new InMemoryCache({ - typePolicies: { - Query: { - fields: { - greetings: offsetLimitPagination(), - }, - }, - }, - }); - - const client = new ApolloClient({ - link: httpLink, - cache, - incrementalHandler: new GraphQL17Alpha9Handler(), - }); - - using _disabledAct = disableActEnvironment(); - const { takeRender, getCurrentSnapshot } = await renderSuspenseHook( - () => useSuspenseQuery(query, { variables: { offset: 0 } }), - { - wrapper: createClientWrapper(client), - } - ); - - { - const { renderedComponents } = await takeRender(); - - expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); - } - - enqueueInitialChunk({ - data: { - greetings: [{ __typename: "Greeting", message: "Hello world" }], - }, - pending: [{ id: "0", path: ["greetings", 0] }], - hasNext: true, - }); - - { - const { snapshot, renderedComponents } = await takeRender(); - - expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); - expect(snapshot).toStrictEqualTyped({ - data: markAsStreaming({ - greetings: [{ __typename: "Greeting", message: "Hello world" }], - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: undefined, - }); - } - - enqueueSubsequentChunk({ - incremental: [ - { - data: { - recipient: { name: "Alice", __typename: "Person" }, - }, - id: "0", - }, - ], - completed: [{ id: "0" }], - hasNext: false, - }); - - { - const { snapshot, renderedComponents } = await takeRender(); - - expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); - expect(snapshot).toStrictEqualTyped({ - data: { - greetings: [ - { - __typename: "Greeting", - message: "Hello world", - recipient: { - __typename: "Person", - name: "Alice", - }, - }, - ], - }, - dataState: "complete", - networkStatus: NetworkStatus.ready, - error: undefined, - }); - } - - const fetchMorePromise = getCurrentSnapshot().fetchMore({ - variables: { offset: 1 }, - }); - - { - const { renderedComponents } = await takeRender(); - - expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); - } - - enqueueInitialChunk({ - data: { - greetings: [ - { - __typename: "Greeting", - message: "Goodbye", - }, - ], - }, - pending: [{ id: "0", path: ["greetings", 0] }], - hasNext: true, - }); - - { - const { snapshot, renderedComponents } = await takeRender(); - - expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); - expect(snapshot).toStrictEqualTyped({ - data: markAsStreaming({ - greetings: [ - { - __typename: "Greeting", - message: "Hello world", - recipient: { - __typename: "Person", - name: "Alice", - }, - }, - { - __typename: "Greeting", - message: "Goodbye", - }, - ], - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: undefined, - }); - } - - enqueueSubsequentChunk({ - incremental: [ - { - data: { - recipient: { name: "Bob", __typename: "Person" }, - }, - id: "0", - }, - ], - completed: [{ id: "0" }], - hasNext: false, - }); - - { - const { snapshot, renderedComponents } = await takeRender(); - - expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); - expect(snapshot).toStrictEqualTyped({ - data: { - greetings: [ - { - __typename: "Greeting", - message: "Hello world", - recipient: { - __typename: "Person", - name: "Alice", - }, - }, - { - __typename: "Greeting", - message: "Goodbye", - recipient: { - __typename: "Person", - name: "Bob", - }, - }, - ], - }, - dataState: "complete", - 
networkStatus: NetworkStatus.ready, - error: undefined, - }); - } - - await expect(fetchMorePromise).resolves.toStrictEqualTyped({ - data: { - greetings: [ - { - __typename: "Greeting", - message: "Goodbye", - recipient: { - __typename: "Person", - name: "Bob", - }, - }, - ], - }, - }); - - await expect(takeRender).not.toRerender(); - } -); - test("throws network errors returned by deferred queries", async () => { using _consoleSpy = spyOnConsole("error"); From 4226a2ed2f1ca59a59c082319028752f5464f80a Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 16 Sep 2025 10:49:29 -0600 Subject: [PATCH 226/254] it -> test --- .../__tests__/useSuspenseQuery/deferGraphQL17Alpha9.test.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/react/hooks/__tests__/useSuspenseQuery/deferGraphQL17Alpha9.test.tsx b/src/react/hooks/__tests__/useSuspenseQuery/deferGraphQL17Alpha9.test.tsx index e156e40ef29..f928f171c9f 100644 --- a/src/react/hooks/__tests__/useSuspenseQuery/deferGraphQL17Alpha9.test.tsx +++ b/src/react/hooks/__tests__/useSuspenseQuery/deferGraphQL17Alpha9.test.tsx @@ -1196,7 +1196,7 @@ test("incrementally renders data returned after skipping a deferred query", asyn }); // https://github.com/apollographql/apollo-client/issues/11034 -it("incrementally rerenders data returned by a `fetchMore` for a deferred query", async () => { +test("incrementally rerenders data returned by a `fetchMore` for a deferred query", async () => { const query = gql` query ($offset: Int) { greetings(offset: $offset) { From 571d835a6a9b1ca5074161284d8ee7772763f1c8 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 16 Sep 2025 16:44:04 -0600 Subject: [PATCH 227/254] Fix fetchMore with incremental results by emitting incremental results --- src/core/ObservableQuery.ts | 115 +++++++++++++++++++++++++----------- 1 file changed, 80 insertions(+), 35 deletions(-) diff --git a/src/core/ObservableQuery.ts b/src/core/ObservableQuery.ts index 37218418bf5..6e39139a9fb 100644 --- a/src/core/ObservableQuery.ts +++ b/src/core/ObservableQuery.ts @@ -14,7 +14,10 @@ import type { Cache, MissingFieldError } from "@apollo/client/cache"; import type { MissingTree } from "@apollo/client/cache"; import type { MaybeMasked, Unmasked } from "@apollo/client/masking"; import type { DeepPartial } from "@apollo/client/utilities"; -import { isNetworkRequestInFlight } from "@apollo/client/utilities"; +import { + isNetworkRequestInFlight, + isNetworkRequestSettled, +} from "@apollo/client/utilities"; import { __DEV__ } from "@apollo/client/utilities/environment"; import { compact, @@ -806,6 +809,7 @@ Did you mean to call refetch(variables) instead of refetch({ variables })?`, TFetchVars > ): Promise>; + public fetchMore< TFetchData = TData, TFetchVars extends OperationVariables = TVariables, @@ -863,7 +867,7 @@ Did you mean to call refetch(variables) instead of refetch({ variables })?`, this.transformDocument(this.options.query) : combinedOptions.query; - let wasUpdated = false; + // let wasUpdated = false; const isCached = this.options.fetchPolicy !== "no-cache"; @@ -877,6 +881,7 @@ Did you mean to call refetch(variables) instead of refetch({ variables })?`, const { finalize, pushNotification } = this.pushOperation( NetworkStatus.fetchMore ); + pushNotification( { source: "newNetworkStatus", @@ -885,14 +890,39 @@ Did you mean to call refetch(variables) instead of refetch({ variables })?`, }, { shouldEmit: EmitBehavior.networkStatusChange } ); - return this.queryManager - .fetchQuery(combinedOptions, NetworkStatus.fetchMore) 
- .then((fetchMoreResult) => { - // disable the `fetchMore` override that is currently active - // the next updates caused by this should not be `fetchMore` anymore, - // but `ready` or whatever other calculated loading state is currently - // appropriate - finalize(); + + let wasUpdated = false; + const { promise, operator } = getTrackingOperatorPromise( + (value: QueryNotification.Value) => { + switch (value.kind) { + case "E": { + throw value.error; + } + case "N": { + if (value.source !== "newNetworkStatus" && !value.value.loading) { + return value.value; + } + } + } + } + ); + + const { observable } = this.queryManager.fetchObservableWithInfo( + combinedOptions, + { networkStatus: NetworkStatus.fetchMore } + ); + + observable.pipe(operator).subscribe({ + next: (notification) => { + if (notification.kind !== "N" || notification.source !== "network") { + return; + } + + const fetchMoreResult = notification.value; + + if (isNetworkRequestSettled(notification.value.networkStatus)) { + finalize(); + } if (isCached) { // Performing this cache update inside a cache.batch transaction ensures @@ -930,9 +960,27 @@ Did you mean to call refetch(variables) instead of refetch({ variables })?`, } }, - onWatchUpdated: (watch) => { + onWatchUpdated: (watch, diff) => { if (watch.watcher === this) { wasUpdated = true; + const lastResult = this.getCurrentResult(); + pushNotification({ + ...notification, + value: { + ...lastResult, + networkStatus: + fetchMoreResult.networkStatus === NetworkStatus.error ? + NetworkStatus.ready + : fetchMoreResult.networkStatus, + // will be overwritten anyways, just here for types sake + loading: false, + data: diff.result, + dataState: + fetchMoreResult.dataState === "streaming" ? + "streaming" + : "complete", + }, + }); } }, }); @@ -956,9 +1004,9 @@ Did you mean to call refetch(variables) instead of refetch({ variables })?`, fetchMoreResult: fetchMoreResult.data as Unmasked, variables: combinedOptions.variables as TFetchVars, }); - // was reportResult + pushNotification({ - kind: "N", + ...notification, value: { ...lastResult, networkStatus: NetworkStatus.ready, @@ -968,32 +1016,29 @@ Did you mean to call refetch(variables) instead of refetch({ variables })?`, dataState: lastResult.dataState === "streaming" ? "streaming" : "complete", }, - source: "network", }); } + }, + }); - return this.maskResult(fetchMoreResult); - }) - .finally(() => { - // call `finalize` a second time in case the `.then` case above was not reached - finalize(); + return preventUnhandledRejection( + promise + .then((result) => toQueryResult(this.maskResult(result))) + .finally(() => { + if (isCached && !wasUpdated) { + finalize(); - // In case the cache writes above did not generate a broadcast - // notification (which would have been intercepted by onWatchUpdated), - // likely because the written data were the same as what was already in - // the cache, we still want fetchMore to deliver its final loading:false - // result with the unchanged data. - if (isCached && !wasUpdated) { - pushNotification( - { - kind: "N", - source: "newNetworkStatus", - value: {}, - }, - { shouldEmit: EmitBehavior.force } - ); - } - }); + pushNotification( + { + kind: "N", + source: "newNetworkStatus", + value: {}, + }, + { shouldEmit: EmitBehavior.force } + ); + } + }) + ); } // XXX the subscription variables are separate from the query variables. 
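From a subscriber's point of view, the `fetchMore` rework above means incremental payloads now surface as emissions with `NetworkStatus.streaming` before the final `ready` result, instead of a single update once the whole response has arrived. A rough usage sketch with a placeholder endpoint and a field name borrowed from the tests in this series:

import {
  ApolloClient,
  HttpLink,
  InMemoryCache,
  NetworkStatus,
  gql,
} from "@apollo/client";
import { Defer20220824Handler } from "@apollo/client/incremental";

const client = new ApolloClient({
  link: new HttpLink({ uri: "/graphql" }), // placeholder endpoint
  cache: new InMemoryCache(),
  incrementalHandler: new Defer20220824Handler(),
});

const query = gql`
  query ($offset: Int) {
    friendList(offset: $offset) @stream(initialCount: 1) {
      id
      name
    }
  }
`;

const observable = client.watchQuery({ query, variables: { offset: 0 } });

observable.subscribe((result) => {
  if (result.networkStatus === NetworkStatus.streaming) {
    // rows from the in-flight incremental response are already reflected here
  } else if (result.networkStatus === NetworkStatus.ready) {
    // final, complete list
  }
});

// With the change above, the same streaming emissions also occur while a
// fetchMore request is delivering its incremental chunks.
void observable.fetchMore({ variables: { offset: 2 } });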
From 39d08d9cd87d333abea3d37ffdc5d8ad63a7576e Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 16 Sep 2025 17:12:42 -0600 Subject: [PATCH 228/254] Use === --- src/core/ObservableQuery.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/core/ObservableQuery.ts b/src/core/ObservableQuery.ts index 6e39139a9fb..e55e29d0cd3 100644 --- a/src/core/ObservableQuery.ts +++ b/src/core/ObservableQuery.ts @@ -1712,8 +1712,8 @@ Did you mean to call refetch(variables) instead of refetch({ variables })?`, if ( dirty && - (this.options.fetchPolicy == "cache-only" || - this.options.fetchPolicy == "cache-and-network" || + (this.options.fetchPolicy === "cache-only" || + this.options.fetchPolicy === "cache-and-network" || !this.activeOperations.size) ) { const diff = this.getCacheDiff(); From bdb5de08ddd1acdcef6bdc3befa5d322122a18dc Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 16 Sep 2025 17:17:40 -0600 Subject: [PATCH 229/254] Re-enable fetchMore with incremental results for defer20220824 tests --- .../useSuspenseQuery/defer20220824.test.tsx | 289 ++---------------- 1 file changed, 28 insertions(+), 261 deletions(-) diff --git a/src/react/hooks/__tests__/useSuspenseQuery/defer20220824.test.tsx b/src/react/hooks/__tests__/useSuspenseQuery/defer20220824.test.tsx index dcf8e4a32cb..a0a3b9192bf 100644 --- a/src/react/hooks/__tests__/useSuspenseQuery/defer20220824.test.tsx +++ b/src/react/hooks/__tests__/useSuspenseQuery/defer20220824.test.tsx @@ -1171,18 +1171,8 @@ test("incrementally renders data returned after skipping a deferred query", asyn await expect(takeRender).not.toRerender(); }); -// TODO: This test is a bit of a lie. `fetchMore` should incrementally -// rerender when using `@defer` but there is currently a bug in the core -// implementation that prevents updates until the final result is returned. -// This test reflects the behavior as it exists today, but will need -// to be updated once the core bug is fixed. -// -// NOTE: A duplicate it.failng test has been added right below this one with -// the expected behavior added in (i.e. the commented code in this test). Once -// the core bug is fixed, this test can be removed in favor of the other test. 
-// // https://github.com/apollographql/apollo-client/issues/11034 -test("rerenders data returned by `fetchMore` for a deferred query", async () => { +test("incrementally rerenders data returned by a `fetchMore` for a deferred query", async () => { const query = gql` query ($offset: Int) { greetings(offset: $offset) { @@ -1307,35 +1297,33 @@ test("rerenders data returned by `fetchMore` for a deferred query", async () => hasNext: true, }); - // TODO: Re-enable once the core bug is fixed - // { - // const { snapshot, renderedComponents } = await takeRender(); - // - // expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); - // expect(snapshot).toStrictEqualTyped({ - // data: markAsStreaming({ - // greetings: [ - // { - // __typename: "Greeting", - // message: "Hello world", - // recipient: { - // __typename: "Person", - // name: "Alice", - // }, - // }, - // { - // __typename: "Greeting", - // message: "Goodbye", - // }, - // ], - // }), - // dataState: "streaming", - // networkStatus: NetworkStatus.streaming, - // error: undefined, - // }); - // } - - await wait(0); + { + const { snapshot, renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + greetings: [ + { + __typename: "Greeting", + message: "Hello world", + recipient: { + __typename: "Person", + name: "Alice", + }, + }, + { + __typename: "Greeting", + message: "Goodbye", + }, + ], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, + }); + } + enqueueSubsequentChunk({ incremental: [ { @@ -1397,227 +1385,6 @@ test("rerenders data returned by `fetchMore` for a deferred query", async () => await expect(takeRender).not.toRerender(); }); -// TODO: This is a duplicate of the test above, but with the expected behavior -// added (hence the `it.failing`). Remove the previous test once issue #11034 -// is fixed. -// -// https://github.com/apollographql/apollo-client/issues/11034 -it.failing( - "incrementally rerenders data returned by a `fetchMore` for a deferred query", - async () => { - const query = gql` - query ($offset: Int) { - greetings(offset: $offset) { - message - ... 
@defer { - recipient { - name - } - } - } - } - `; - - const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = - mockDefer20220824(); - - const cache = new InMemoryCache({ - typePolicies: { - Query: { - fields: { - greetings: offsetLimitPagination(), - }, - }, - }, - }); - - const client = new ApolloClient({ - link: httpLink, - cache, - incrementalHandler: new Defer20220824Handler(), - }); - - using _disabledAct = disableActEnvironment(); - const { takeRender, getCurrentSnapshot } = await renderSuspenseHook( - () => useSuspenseQuery(query, { variables: { offset: 0 } }), - { - wrapper: createClientWrapper(client), - } - ); - - { - const { renderedComponents } = await takeRender(); - - expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); - } - - enqueueInitialChunk({ - data: { - greetings: [{ __typename: "Greeting", message: "Hello world" }], - }, - hasNext: true, - }); - - { - const { snapshot, renderedComponents } = await takeRender(); - - expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); - expect(snapshot).toStrictEqualTyped({ - data: markAsStreaming({ - greetings: [{ __typename: "Greeting", message: "Hello world" }], - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: undefined, - }); - } - - enqueueSubsequentChunk({ - incremental: [ - { - data: { - recipient: { name: "Alice", __typename: "Person" }, - }, - path: ["greetings", 0], - }, - ], - hasNext: false, - }); - - { - const { snapshot, renderedComponents } = await takeRender(); - - expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); - expect(snapshot).toStrictEqualTyped({ - data: { - greetings: [ - { - __typename: "Greeting", - message: "Hello world", - recipient: { - __typename: "Person", - name: "Alice", - }, - }, - ], - }, - dataState: "complete", - networkStatus: NetworkStatus.ready, - error: undefined, - }); - } - - const fetchMorePromise = getCurrentSnapshot().fetchMore({ - variables: { offset: 1 }, - }); - - { - const { renderedComponents } = await takeRender(); - - expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); - } - - enqueueInitialChunk({ - data: { - greetings: [ - { - __typename: "Greeting", - message: "Goodbye", - }, - ], - }, - hasNext: true, - }); - - { - const { snapshot, renderedComponents } = await takeRender(); - - expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); - expect(snapshot).toStrictEqualTyped({ - data: markAsStreaming({ - greetings: [ - { - __typename: "Greeting", - message: "Hello world", - recipient: { - __typename: "Person", - name: "Alice", - }, - }, - { - __typename: "Greeting", - message: "Goodbye", - }, - ], - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: undefined, - }); - } - - enqueueSubsequentChunk({ - incremental: [ - { - data: { - recipient: { name: "Bob", __typename: "Person" }, - }, - path: ["greetings", 0], - }, - ], - hasNext: false, - }); - - { - const { snapshot, renderedComponents } = await takeRender(); - - expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); - expect(snapshot).toStrictEqualTyped({ - data: { - greetings: [ - { - __typename: "Greeting", - message: "Hello world", - recipient: { - __typename: "Person", - name: "Alice", - }, - }, - { - __typename: "Greeting", - message: "Goodbye", - recipient: { - __typename: "Person", - name: "Bob", - }, - }, - ], - }, - dataState: "complete", - networkStatus: NetworkStatus.ready, - error: undefined, - }); - } - - await expect(fetchMorePromise!).resolves.toStrictEqualTyped({ - data: { - 
greetings: [ - { - __typename: "Greeting", - message: "Goodbye", - recipient: { - __typename: "Person", - name: "Bob", - }, - }, - ], - }, - }); - - await expect(takeRender).not.toRerender(); - } -); - test("throws network errors returned by deferred queries", async () => { using _consoleSpy = spyOnConsole("error"); From 4874229b1223f6b728e909726ac967fca4aaf97f Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 16 Sep 2025 17:48:47 -0600 Subject: [PATCH 230/254] Ensure stream results that emit hasNext: false only set network status --- src/core/ObservableQuery.ts | 35 ++++++++++++++++++++++++++--------- 1 file changed, 26 insertions(+), 9 deletions(-) diff --git a/src/core/ObservableQuery.ts b/src/core/ObservableQuery.ts index e55e29d0cd3..8ad41eaf640 100644 --- a/src/core/ObservableQuery.ts +++ b/src/core/ObservableQuery.ts @@ -912,8 +912,9 @@ Did you mean to call refetch(variables) instead of refetch({ variables })?`, { networkStatus: NetworkStatus.fetchMore } ); - observable.pipe(operator).subscribe({ + const subscription = observable.pipe(operator).subscribe({ next: (notification) => { + wasUpdated = false; if (notification.kind !== "N" || notification.source !== "network") { return; } @@ -965,7 +966,8 @@ Did you mean to call refetch(variables) instead of refetch({ variables })?`, wasUpdated = true; const lastResult = this.getCurrentResult(); pushNotification({ - ...notification, + kind: "N", + source: "network", value: { ...lastResult, networkStatus: @@ -1025,17 +1027,32 @@ Did you mean to call refetch(variables) instead of refetch({ variables })?`, promise .then((result) => toQueryResult(this.maskResult(result))) .finally(() => { + subscription.unsubscribe(); if (isCached && !wasUpdated) { finalize(); - pushNotification( - { + const lastResult = this.getCurrentResult(); + + if (lastResult.networkStatus === NetworkStatus.streaming) { + pushNotification({ kind: "N", - source: "newNetworkStatus", - value: {}, - }, - { shouldEmit: EmitBehavior.force } - ); + source: "network", + value: { + ...lastResult, + dataState: "complete", + networkStatus: NetworkStatus.ready, + } as any, + }); + } else { + pushNotification( + { + kind: "N", + source: "newNetworkStatus", + value: {}, + }, + { shouldEmit: EmitBehavior.force } + ); + } } }) ); From 0c48f0a5aba1219117172577f2861b29a91ba026 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 16 Sep 2025 17:50:29 -0600 Subject: [PATCH 231/254] Enable stream tests with fetchMore incremental rendering --- .../streamDefer20220824.test.tsx | 380 +++++------------ .../streamGraphQL17Alpha9.test.tsx | 384 +++++------------- 2 files changed, 212 insertions(+), 552 deletions(-) diff --git a/src/react/hooks/__tests__/useSuspenseQuery/streamDefer20220824.test.tsx b/src/react/hooks/__tests__/useSuspenseQuery/streamDefer20220824.test.tsx index 4468bb2a2d2..a14e4145350 100644 --- a/src/react/hooks/__tests__/useSuspenseQuery/streamDefer20220824.test.tsx +++ b/src/react/hooks/__tests__/useSuspenseQuery/streamDefer20220824.test.tsx @@ -822,322 +822,152 @@ test("incrementally renders data returned after skipping a streamed query", asyn await expect(takeRender).not.toRerender(); }); -// TODO: This test is a bit of a lie. `fetchMore` should incrementally -// rerender when using `@stream` but there is currently a bug in the core -// implementation that prevents updates until the final result is returned. -// This test reflects the behavior as it exists today, but will need -// to be updated once the core bug is fixed. 
-// -// NOTE: A duplicate it.failng test has been added right below this one with -// the expected behavior added in (i.e. the commented code in this test). Once -// the core bug is fixed, this test can be removed in favor of the other test. -// // https://github.com/apollographql/apollo-client/issues/11034 -test.failing( - "rerenders data returned by `fetchMore` for a streamed query", - async () => { - let subject!: Subject; - const query = gql` - query ($offset: Int) { - friendList(offset: $offset) @stream(initialCount: 1) { - id - name - } +test("incrementally rerenders data returned by a `fetchMore` for a streamed query", async () => { + let subject!: Subject; + const query = gql` + query ($offset: Int) { + friendList(offset: $offset) @stream(initialCount: 1) { + id + name } - `; + } + `; - const cache = new InMemoryCache({ - typePolicies: { - Query: { - fields: { - greetings: offsetLimitPagination(), - }, + const cache = new InMemoryCache({ + typePolicies: { + Query: { + fields: { + friendList: offsetLimitPagination(), }, }, - }); - - const client = new ApolloClient({ - link: createLink({ - friendList: () => { - const iterator = asyncIterableSubject(); - subject = iterator.subject; - - return iterator.stream; - }, - }), - cache, - incrementalHandler: new Defer20220824Handler(), - }); - - using _disabledAct = disableActEnvironment(); - const { takeRender, getCurrentSnapshot } = await renderSuspenseHook( - () => useSuspenseQuery(query, { variables: { offset: 0 } }), - { wrapper: createClientWrapper(client) } - ); - - { - const { renderedComponents } = await takeRender(); + }, + }); - expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); - } + const client = new ApolloClient({ + link: createLink({ + friendList: () => { + const iterator = asyncIterableSubject(); + subject = iterator.subject; - subject.next(friends[0]); + return iterator.stream; + }, + }), + cache, + incrementalHandler: new Defer20220824Handler(), + }); - { - const { snapshot, renderedComponents } = await takeRender(); + using _disabledAct = disableActEnvironment(); + const { takeRender, getCurrentSnapshot } = await renderSuspenseHook( + () => useSuspenseQuery(query, { variables: { offset: 0 } }), + { wrapper: createClientWrapper(client) } + ); - expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); - expect(snapshot).toStrictEqualTyped({ - data: markAsStreaming({ - friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: undefined, - }); - } + { + const { renderedComponents } = await takeRender(); - subject.next(friends[1]); - subject.complete(); + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } - { - const { snapshot, renderedComponents } = await takeRender(); + subject.next(friends[0]); - expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); - expect(snapshot).toStrictEqualTyped({ - data: { - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - { __typename: "Friend", id: "2", name: "Han" }, - ], - }, - dataState: "complete", - networkStatus: NetworkStatus.ready, - error: undefined, - }); - } + { + const { snapshot, renderedComponents } = await takeRender(); - const fetchMorePromise = getCurrentSnapshot().fetchMore({ - variables: { offset: 2 }, + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }), + dataState: "streaming", 
+ networkStatus: NetworkStatus.streaming, + error: undefined, }); + } - { - const { renderedComponents } = await takeRender(); - - expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); - } - - subject.next(friends[2]); - - // TODO: Re-enable once the core bug is fixed - // { - // const { snapshot, renderedComponents } = await takeRender(); - // - // expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); - // expect(snapshot).toStrictEqualTyped({ - // data: markAsStreaming({ - // friendList: [ - // { __typename: "Friend", id: "1", name: "Luke" }, - // { __typename: "Friend", id: "2", name: "Han" }, - // { __typename: "Friend", id: "3", name: "Leia" }, - // ], - // }), - // dataState: "streaming", - // networkStatus: NetworkStatus.streaming, - // error: undefined, - // }); - // } - - await wait(0); - subject.next({ id: 4, name: "Chewbacca" }); - subject.complete(); - - { - const { snapshot, renderedComponents } = await takeRender(); + subject.next(friends[1]); + subject.complete(); - expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); - expect(snapshot).toStrictEqualTyped({ - data: { - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3", name: "Leia" }, - { __typename: "Friend", id: "4", name: "Chewbacca" }, - ], - }, - dataState: "complete", - networkStatus: NetworkStatus.ready, - error: undefined, - }); - } + { + const { snapshot, renderedComponents } = await takeRender(); - await expect(fetchMorePromise).resolves.toStrictEqualTyped({ + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ data: { friendList: [ - { __typename: "Friend", id: "3", name: "Leia" }, - { __typename: "Friend", id: "4", name: "Chewbacca" }, + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, ], }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, }); - - await expect(takeRender).not.toRerender(); } -); - -// TODO: This is a duplicate of the test above, but with the expected behavior -// added (hence the `it.failing`). Remove the previous test once issue #11034 -// is fixed. 
-// -// https://github.com/apollographql/apollo-client/issues/11034 -test.failing( - "incrementally rerenders data returned by a `fetchMore` for a streamed query", - async () => { - let subject!: Subject; - const query = gql` - query ($offset: Int) { - friendList(offset: $offset) @stream(initialCount: 1) { - id - name - } - } - `; - - const cache = new InMemoryCache({ - typePolicies: { - Query: { - fields: { - greetings: offsetLimitPagination(), - }, - }, - }, - }); - const client = new ApolloClient({ - link: createLink({ - friendList: () => { - const iterator = asyncIterableSubject(); - subject = iterator.subject; - - return iterator.stream; - }, - }), - cache, - incrementalHandler: new Defer20220824Handler(), - }); - - using _disabledAct = disableActEnvironment(); - const { takeRender, getCurrentSnapshot } = await renderSuspenseHook( - () => useSuspenseQuery(query, { variables: { offset: 0 } }), - { wrapper: createClientWrapper(client) } - ); - - { - const { renderedComponents } = await takeRender(); - - expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); - } - - subject.next(friends[0]); - - { - const { snapshot, renderedComponents } = await takeRender(); + const fetchMorePromise = getCurrentSnapshot().fetchMore({ + variables: { offset: 2 }, + }); - expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); - expect(snapshot).toStrictEqualTyped({ - data: markAsStreaming({ - friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: undefined, - }); - } + { + const { renderedComponents } = await takeRender(); - subject.next(friends[1]); - subject.complete(); + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } - { - const { snapshot, renderedComponents } = await takeRender(); + subject.next(friends[2]); - expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); - expect(snapshot).toStrictEqualTyped({ - data: { - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - { __typename: "Friend", id: "2", name: "Han" }, - ], - }, - dataState: "complete", - networkStatus: NetworkStatus.ready, - error: undefined, - }); - } + { + const { snapshot, renderedComponents } = await takeRender(); - const fetchMorePromise = getCurrentSnapshot().fetchMore({ - variables: { offset: 2 }, + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, }); + } - { - const { renderedComponents } = await takeRender(); - - expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); - } - - subject.next(friends[2]); - - { - const { snapshot, renderedComponents } = await takeRender(); - - expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); - expect(snapshot).toStrictEqualTyped({ - data: markAsStreaming({ - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3", name: "Leia" }, - ], - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: undefined, - }); - } - - await wait(0); - subject.next({ id: 4, name: "Chewbacca" }); - subject.complete(); - - { - const { snapshot, renderedComponents } = await takeRender(); + 
subject.next({ id: 4, name: "Chewbacca" }); + subject.complete(); - expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); - expect(snapshot).toStrictEqualTyped({ - data: { - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3", name: "Leia" }, - { __typename: "Friend", id: "4", name: "Chewbacca" }, - ], - }, - dataState: "complete", - networkStatus: NetworkStatus.ready, - error: undefined, - }); - } + { + const { snapshot, renderedComponents } = await takeRender(); - await expect(fetchMorePromise).resolves.toStrictEqualTyped({ + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ data: { friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, { __typename: "Friend", id: "3", name: "Leia" }, { __typename: "Friend", id: "4", name: "Chewbacca" }, ], }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, }); - - await expect(takeRender).not.toRerender(); } -); + + await expect(fetchMorePromise).resolves.toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "3", name: "Leia" }, + { __typename: "Friend", id: "4", name: "Chewbacca" }, + ], + }, + }); + + await expect(takeRender).not.toRerender(); +}); test("throws network errors returned by streamed queries", async () => { using _consoleSpy = spyOnConsole("error"); diff --git a/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx b/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx index 791d712bd25..63d97b84f78 100644 --- a/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx +++ b/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx @@ -822,322 +822,152 @@ test("incrementally renders data returned after skipping a streamed query", asyn await expect(takeRender).not.toRerender(); }); -// TODO: This test is a bit of a lie. `fetchMore` should incrementally -// rerender when using `@stream` but there is currently a bug in the core -// implementation that prevents updates until the final result is returned. -// This test reflects the behavior as it exists today, but will need -// to be updated once the core bug is fixed. -// -// NOTE: A duplicate it.failng test has been added right below this one with -// the expected behavior added in (i.e. the commented code in this test). Once -// the core bug is fixed, this test can be removed in favor of the other test. 
-// // https://github.com/apollographql/apollo-client/issues/11034 -test.failing( - "rerenders data returned by `fetchMore` for a streamed query", - async () => { - let subject!: Subject; - const query = gql` - query ($offset: Int) { - friendList(offset: $offset) @stream(initialCount: 1) { - id - name - } +test("incrementally rerenders data returned by a `fetchMore` for a streamed query", async () => { + let subject!: Subject; + const query = gql` + query ($offset: Int) { + friendList(offset: $offset) @stream(initialCount: 1) { + id + name } - `; + } + `; - const cache = new InMemoryCache({ - typePolicies: { - Query: { - fields: { - greetings: offsetLimitPagination(), - }, + const cache = new InMemoryCache({ + typePolicies: { + Query: { + fields: { + friendList: offsetLimitPagination(), }, }, - }); - - const client = new ApolloClient({ - link: createLink({ - friendList: () => { - const iterator = asyncIterableSubject(); - subject = iterator.subject; - - return iterator.stream; - }, - }), - cache, - incrementalHandler: new GraphQL17Alpha9Handler(), - }); - - using _disabledAct = disableActEnvironment(); - const { takeRender, getCurrentSnapshot } = await renderSuspenseHook( - () => useSuspenseQuery(query, { variables: { offset: 0 } }), - { wrapper: createClientWrapper(client) } - ); - - { - const { renderedComponents } = await takeRender(); + }, + }); - expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); - } + const client = new ApolloClient({ + link: createLink({ + friendList: () => { + const iterator = asyncIterableSubject(); + subject = iterator.subject; - subject.next(friends[0]); + return iterator.stream; + }, + }), + cache, + incrementalHandler: new GraphQL17Alpha9Handler(), + }); - { - const { snapshot, renderedComponents } = await takeRender(); + using _disabledAct = disableActEnvironment(); + const { takeRender, getCurrentSnapshot } = await renderSuspenseHook( + () => useSuspenseQuery(query, { variables: { offset: 0 } }), + { wrapper: createClientWrapper(client) } + ); - expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); - expect(snapshot).toStrictEqualTyped({ - data: markAsStreaming({ - friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: undefined, - }); - } + { + const { renderedComponents } = await takeRender(); - subject.next(friends[1]); - subject.complete(); + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } - { - const { snapshot, renderedComponents } = await takeRender(); + subject.next(friends[0]); - expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); - expect(snapshot).toStrictEqualTyped({ - data: { - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - { __typename: "Friend", id: "2", name: "Han" }, - ], - }, - dataState: "complete", - networkStatus: NetworkStatus.ready, - error: undefined, - }); - } + { + const { snapshot, renderedComponents } = await takeRender(); - const fetchMorePromise = getCurrentSnapshot().fetchMore({ - variables: { offset: 2 }, + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, }); + } - { - const { renderedComponents } = await takeRender(); - - expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); - } - - subject.next(friends[2]); - - // 
TODO: Re-enable once the core bug is fixed - // { - // const { snapshot, renderedComponents } = await takeRender(); - // - // expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); - // expect(snapshot).toStrictEqualTyped({ - // data: markAsStreaming({ - // friendList: [ - // { __typename: "Friend", id: "1", name: "Luke" }, - // { __typename: "Friend", id: "2", name: "Han" }, - // { __typename: "Friend", id: "3", name: "Leia" }, - // ], - // }), - // dataState: "streaming", - // networkStatus: NetworkStatus.streaming, - // error: undefined, - // }); - // } - - await wait(0); - subject.next({ id: 4, name: "Chewbacca" }); - subject.complete(); - - { - const { snapshot, renderedComponents } = await takeRender(); + subject.next(friends[1]); + subject.complete(); - expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); - expect(snapshot).toStrictEqualTyped({ - data: { - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3", name: "Leia" }, - { __typename: "Friend", id: "4", name: "Chewbacca" }, - ], - }, - dataState: "complete", - networkStatus: NetworkStatus.ready, - error: undefined, - }); - } + { + const { snapshot, renderedComponents } = await takeRender(); - await expect(fetchMorePromise).resolves.toStrictEqualTyped({ + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ data: { friendList: [ - { __typename: "Friend", id: "3", name: "Leia" }, - { __typename: "Friend", id: "4", name: "Chewbacca" }, + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, ], }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, }); - - await expect(takeRender).not.toRerender(); } -); - -// TODO: This is a duplicate of the test above, but with the expected behavior -// added (hence the `it.failing`). Remove the previous test once issue #11034 -// is fixed. 
-// -// https://github.com/apollographql/apollo-client/issues/11034 -test.failing( - "incrementally rerenders data returned by a `fetchMore` for a streamed query", - async () => { - let subject!: Subject; - const query = gql` - query ($offset: Int) { - friendList(offset: $offset) @stream(initialCount: 1) { - id - name - } - } - `; - - const cache = new InMemoryCache({ - typePolicies: { - Query: { - fields: { - greetings: offsetLimitPagination(), - }, - }, - }, - }); - const client = new ApolloClient({ - link: createLink({ - friendList: () => { - const iterator = asyncIterableSubject(); - subject = iterator.subject; - - return iterator.stream; - }, - }), - cache, - incrementalHandler: new GraphQL17Alpha9Handler(), - }); - - using _disabledAct = disableActEnvironment(); - const { takeRender, getCurrentSnapshot } = await renderSuspenseHook( - () => useSuspenseQuery(query, { variables: { offset: 0 } }), - { wrapper: createClientWrapper(client) } - ); - - { - const { renderedComponents } = await takeRender(); - - expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); - } - - subject.next(friends[0]); - - { - const { snapshot, renderedComponents } = await takeRender(); + const fetchMorePromise = getCurrentSnapshot().fetchMore({ + variables: { offset: 2 }, + }); - expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); - expect(snapshot).toStrictEqualTyped({ - data: markAsStreaming({ - friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: undefined, - }); - } + { + const { renderedComponents } = await takeRender(); - subject.next(friends[1]); - subject.complete(); + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } - { - const { snapshot, renderedComponents } = await takeRender(); + subject.next(friends[2]); - expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); - expect(snapshot).toStrictEqualTyped({ - data: { - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - { __typename: "Friend", id: "2", name: "Han" }, - ], - }, - dataState: "complete", - networkStatus: NetworkStatus.ready, - error: undefined, - }); - } + { + const { snapshot, renderedComponents } = await takeRender(); - const fetchMorePromise = getCurrentSnapshot().fetchMore({ - variables: { offset: 2 }, + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + error: undefined, }); + } - { - const { renderedComponents } = await takeRender(); - - expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); - } - - subject.next(friends[2]); - - { - const { snapshot, renderedComponents } = await takeRender(); - - expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); - expect(snapshot).toStrictEqualTyped({ - data: markAsStreaming({ - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3", name: "Leia" }, - ], - }), - dataState: "streaming", - networkStatus: NetworkStatus.streaming, - error: undefined, - }); - } - - await wait(0); - subject.next({ id: 4, name: "Chewbacca" }); - subject.complete(); - - { - const { snapshot, renderedComponents } = await takeRender(); + 
subject.next({ id: 4, name: "Chewbacca" }); + subject.complete(); - expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); - expect(snapshot).toStrictEqualTyped({ - data: { - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3", name: "Leia" }, - { __typename: "Friend", id: "4", name: "Chewbacca" }, - ], - }, - dataState: "complete", - networkStatus: NetworkStatus.ready, - error: undefined, - }); - } + { + const { snapshot, renderedComponents } = await takeRender(); - await expect(fetchMorePromise).resolves.toStrictEqualTyped({ - data: { + expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); + expect(snapshot).toStrictEqualTyped({ + data: markAsStreaming({ friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, { __typename: "Friend", id: "3", name: "Leia" }, { __typename: "Friend", id: "4", name: "Chewbacca" }, ], - }, + }), + dataState: "complete", + networkStatus: NetworkStatus.ready, + error: undefined, }); - - await expect(takeRender).not.toRerender(); } -); + + await expect(fetchMorePromise).resolves.toStrictEqualTyped({ + data: { + friendList: [ + { __typename: "Friend", id: "3", name: "Leia" }, + { __typename: "Friend", id: "4", name: "Chewbacca" }, + ], + }, + }); + + await expect(takeRender).not.toRerender(); +}); test("throws network errors returned by streamed queries", async () => { using _consoleSpy = spyOnConsole("error"); From f538a83621e1d110286c056dd8e91611dfd9a1d3 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 16 Sep 2025 17:51:59 -0600 Subject: [PATCH 232/254] Add changeset --- .changeset/funny-bats-hammer.md | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 .changeset/funny-bats-hammer.md diff --git a/.changeset/funny-bats-hammer.md b/.changeset/funny-bats-hammer.md new file mode 100644 index 00000000000..9848f45763b --- /dev/null +++ b/.changeset/funny-bats-hammer.md @@ -0,0 +1,5 @@ +--- +"@apollo/client": patch +--- + +Fix an issue where calling `fetchMore` with `@defer` or `@stream` would not rerender incremental results as they were streamed. 
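The changeset above covers the `fetchMore` fix for incremental delivery. As a rough illustration only (not part of the patch itself), the sketch below shows how an application would observe those intermediate chunks, reusing the `friendList` / `@stream(initialCount: 1)` shape from the test fixtures in this series. The endpoint URI is made up, and the server is assumed to speak the 2022-08-24 incremental delivery format.

```ts
import {
  ApolloClient,
  HttpLink,
  InMemoryCache,
  NetworkStatus,
  gql,
} from "@apollo/client";
import { Defer20220824Handler } from "@apollo/client/incremental";
import { offsetLimitPagination } from "@apollo/client/utilities";

const client = new ApolloClient({
  // Hypothetical endpoint, used only for illustration.
  link: new HttpLink({ uri: "https://example.com/graphql" }),
  cache: new InMemoryCache({
    typePolicies: {
      Query: { fields: { friendList: offsetLimitPagination() } },
    },
  }),
  incrementalHandler: new Defer20220824Handler(),
});

const query = gql`
  query FriendList($offset: Int) {
    friendList(offset: $offset) @stream(initialCount: 1) {
      id
      name
    }
  }
`;

const observable = client.watchQuery({ query, variables: { offset: 0 } });

observable.subscribe((result) => {
  // With this fix, chunks streamed for a fetchMore request are emitted here
  // while the request is still in flight (networkStatus === streaming),
  // instead of only once after the final chunk arrives.
  console.log(
    result.networkStatus === NetworkStatus.streaming ? "streaming" : "settled",
    result.data
  );
});

// Page in more items; each streamed chunk now triggers an emission above.
void observable.fetchMore({ variables: { offset: 2 } });
```

The `offsetLimitPagination` field policy on `friendList` is what merges the paged results into one list, matching the cache setup the updated tests in this commit use.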
From 38fdfb605a0a721a29315232f9021fc440106b17 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 16 Sep 2025 17:52:41 -0600 Subject: [PATCH 233/254] Update size limits --- .size-limits.json | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.size-limits.json b/.size-limits.json index 97c94ef5038..e984d952174 100644 --- a/.size-limits.json +++ b/.size-limits.json @@ -1,6 +1,6 @@ { - "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (CJS)": 44386, - "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production) (CJS)": 39203, - "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\"": 33554, - "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production)": 27582 + "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (CJS)": 44530, + "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production) (CJS)": 39381, + "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\"": 33704, + "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production)": 27701 } From d7bda3c657f399f63e497846c99fa05e18356f80 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 16 Sep 2025 17:55:42 -0600 Subject: [PATCH 234/254] Remove unused import --- .../hooks/__tests__/useSuspenseQuery/defer20220824.test.tsx | 1 - 1 file changed, 1 deletion(-) diff --git a/src/react/hooks/__tests__/useSuspenseQuery/defer20220824.test.tsx b/src/react/hooks/__tests__/useSuspenseQuery/defer20220824.test.tsx index a0a3b9192bf..4f5aac41f24 100644 --- a/src/react/hooks/__tests__/useSuspenseQuery/defer20220824.test.tsx +++ b/src/react/hooks/__tests__/useSuspenseQuery/defer20220824.test.tsx @@ -24,7 +24,6 @@ import { markAsStreaming, mockDefer20220824, spyOnConsole, - wait, } from "@apollo/client/testing/internal"; import { offsetLimitPagination } from "@apollo/client/utilities"; import { invariant } from "@apollo/client/utilities/invariant"; From fc11c65d9c3c7b560695412dce0487c77f3604ac Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 16 Sep 2025 17:57:42 -0600 Subject: [PATCH 235/254] Remove commented code --- src/core/ObservableQuery.ts | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/src/core/ObservableQuery.ts b/src/core/ObservableQuery.ts index 8ad41eaf640..fcc161b5c1e 100644 --- a/src/core/ObservableQuery.ts +++ b/src/core/ObservableQuery.ts @@ -867,8 +867,7 @@ Did you mean to call refetch(variables) instead of refetch({ variables })?`, this.transformDocument(this.options.query) : combinedOptions.query; - // let wasUpdated = false; - + let wasUpdated = false; const isCached = this.options.fetchPolicy !== "no-cache"; if (!isCached) { @@ -891,7 +890,6 @@ Did you mean to call refetch(variables) instead of refetch({ variables })?`, { shouldEmit: EmitBehavior.networkStatusChange } ); - let wasUpdated = false; const { promise, operator } = getTrackingOperatorPromise( (value: QueryNotification.Value) => { switch (value.kind) { From 982a0b6259e11ce3ef5379a7670107298695aa91 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 16 Sep 2025 17:59:26 -0600 Subject: [PATCH 236/254] Tweak to be more like original --- src/core/ObservableQuery.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/core/ObservableQuery.ts b/src/core/ObservableQuery.ts index fcc161b5c1e..37a6d36b5ee 100644 --- a/src/core/ObservableQuery.ts +++ b/src/core/ObservableQuery.ts @@ -1006,7 +1006,7 @@ Did you mean to call 
refetch(variables) instead of refetch({ variables })?`, }); pushNotification({ - ...notification, + kind: "N", value: { ...lastResult, networkStatus: NetworkStatus.ready, @@ -1016,6 +1016,7 @@ Did you mean to call refetch(variables) instead of refetch({ variables })?`, dataState: lastResult.dataState === "streaming" ? "streaming" : "complete", }, + source: "network", }); } }, From c361a8c85d567fb807335fca09d96581198cf689 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 16 Sep 2025 18:10:14 -0600 Subject: [PATCH 237/254] Remove unneeded markAsStreaming --- .../__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx b/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx index 63d97b84f78..7a638011913 100644 --- a/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx +++ b/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx @@ -943,14 +943,14 @@ test("incrementally rerenders data returned by a `fetchMore` for a streamed quer expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); expect(snapshot).toStrictEqualTyped({ - data: markAsStreaming({ + data: { friendList: [ { __typename: "Friend", id: "1", name: "Luke" }, { __typename: "Friend", id: "2", name: "Han" }, { __typename: "Friend", id: "3", name: "Leia" }, { __typename: "Friend", id: "4", name: "Chewbacca" }, ], - }), + }, dataState: "complete", networkStatus: NetworkStatus.ready, error: undefined, From f42972244d3f4d8994dd691870e18c631d0b6749 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 16 Sep 2025 18:16:02 -0600 Subject: [PATCH 238/254] Reset wasUpdated only on processed messages --- src/core/ObservableQuery.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/core/ObservableQuery.ts b/src/core/ObservableQuery.ts index 37a6d36b5ee..efd5a44c9e8 100644 --- a/src/core/ObservableQuery.ts +++ b/src/core/ObservableQuery.ts @@ -912,11 +912,11 @@ Did you mean to call refetch(variables) instead of refetch({ variables })?`, const subscription = observable.pipe(operator).subscribe({ next: (notification) => { - wasUpdated = false; if (notification.kind !== "N" || notification.source !== "network") { return; } + wasUpdated = false; const fetchMoreResult = notification.value; if (isNetworkRequestSettled(notification.value.networkStatus)) { From fc0ad32fdbaf5fa14dd25879b8c70777591b33c8 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 16 Sep 2025 18:19:59 -0600 Subject: [PATCH 239/254] Use filter --- src/core/ObservableQuery.ts | 226 +++++++++++++++++++----------------- 1 file changed, 118 insertions(+), 108 deletions(-) diff --git a/src/core/ObservableQuery.ts b/src/core/ObservableQuery.ts index efd5a44c9e8..b7114bfe9f3 100644 --- a/src/core/ObservableQuery.ts +++ b/src/core/ObservableQuery.ts @@ -8,7 +8,7 @@ import type { Subscribable, Subscription, } from "rxjs"; -import { BehaviorSubject, Observable, share, Subject, tap } from "rxjs"; +import { BehaviorSubject, filter, Observable, share, Subject, tap } from "rxjs"; import type { Cache, MissingFieldError } from "@apollo/client/cache"; import type { MissingTree } from "@apollo/client/cache"; @@ -910,117 +910,127 @@ Did you mean to call refetch(variables) instead of refetch({ variables })?`, { networkStatus: NetworkStatus.fetchMore } ); - const subscription = observable.pipe(operator).subscribe({ - next: (notification) => { - if (notification.kind 
!== "N" || notification.source !== "network") { - return; - } - - wasUpdated = false; - const fetchMoreResult = notification.value; + const subscription = observable + .pipe( + operator, + filter( + ( + notification + ): notification is Extract< + QueryNotification.FromNetwork, + { kind: "N" } + > => notification.kind === "N" && notification.source === "network" + ) + ) + .subscribe({ + next: (notification) => { + wasUpdated = false; + const fetchMoreResult = notification.value; - if (isNetworkRequestSettled(notification.value.networkStatus)) { - finalize(); - } + if (isNetworkRequestSettled(notification.value.networkStatus)) { + finalize(); + } - if (isCached) { - // Performing this cache update inside a cache.batch transaction ensures - // any affected cache.watch watchers are notified at most once about any - // updates. Most watchers will be using the QueryInfo class, which - // responds to notifications by calling reobserveCacheFirst to deliver - // fetchMore cache results back to this ObservableQuery. - this.queryManager.cache.batch({ - update: (cache) => { - if (updateQuery) { - cache.updateQuery( - { - query: this.query, - variables: this.variables, - returnPartialData: true, - optimistic: false, - }, - (previous) => - updateQuery(previous! as any, { - fetchMoreResult: fetchMoreResult.data as any, - variables: combinedOptions.variables as TFetchVars, - }) - ); - } else { - // If we're using a field policy instead of updateQuery, the only - // thing we need to do is write the new data to the cache using - // combinedOptions.variables (instead of this.variables, which is - // what this.updateQuery uses, because it works by abusing the - // original field value, keyed by the original variables). - cache.writeQuery({ - query: combinedOptions.query, - variables: combinedOptions.variables, - data: fetchMoreResult.data as Unmasked, - }); - } - }, + if (isCached) { + // Performing this cache update inside a cache.batch transaction ensures + // any affected cache.watch watchers are notified at most once about any + // updates. Most watchers will be using the QueryInfo class, which + // responds to notifications by calling reobserveCacheFirst to deliver + // fetchMore cache results back to this ObservableQuery. + this.queryManager.cache.batch({ + update: (cache) => { + if (updateQuery) { + cache.updateQuery( + { + query: this.query, + variables: this.variables, + returnPartialData: true, + optimistic: false, + }, + (previous) => + updateQuery(previous! as any, { + fetchMoreResult: fetchMoreResult.data as any, + variables: combinedOptions.variables as TFetchVars, + }) + ); + } else { + // If we're using a field policy instead of updateQuery, the only + // thing we need to do is write the new data to the cache using + // combinedOptions.variables (instead of this.variables, which is + // what this.updateQuery uses, because it works by abusing the + // original field value, keyed by the original variables). + cache.writeQuery({ + query: combinedOptions.query, + variables: combinedOptions.variables, + data: fetchMoreResult.data as Unmasked, + }); + } + }, - onWatchUpdated: (watch, diff) => { - if (watch.watcher === this) { - wasUpdated = true; - const lastResult = this.getCurrentResult(); - pushNotification({ - kind: "N", - source: "network", - value: { - ...lastResult, - networkStatus: - fetchMoreResult.networkStatus === NetworkStatus.error ? 
- NetworkStatus.ready - : fetchMoreResult.networkStatus, - // will be overwritten anyways, just here for types sake - loading: false, - data: diff.result, - dataState: - fetchMoreResult.dataState === "streaming" ? - "streaming" - : "complete", - }, - }); - } - }, - }); - } else { - // There is a possibility `lastResult` may not be set when - // `fetchMore` is called which would cause this to crash. This should - // only happen if we haven't previously reported a result. We don't - // quite know what the right behavior should be here since this block - // of code runs after the fetch result has executed on the network. - // We plan to let it crash in the meantime. - // - // If we get bug reports due to the `data` property access on - // undefined, this should give us a real-world scenario that we can - // use to test against and determine the right behavior. If we do end - // up changing this behavior, this may require, for example, an - // adjustment to the types on `updateQuery` since that function - // expects that the first argument always contains previous result - // data, but not `undefined`. - const lastResult = this.getCurrentResult(); - const data = updateQuery!(lastResult.data as Unmasked, { - fetchMoreResult: fetchMoreResult.data as Unmasked, - variables: combinedOptions.variables as TFetchVars, - }); + onWatchUpdated: (watch, diff) => { + if (watch.watcher === this) { + wasUpdated = true; + const lastResult = this.getCurrentResult(); + pushNotification({ + kind: "N", + source: "network", + value: { + ...lastResult, + networkStatus: + fetchMoreResult.networkStatus === NetworkStatus.error ? + NetworkStatus.ready + : fetchMoreResult.networkStatus, + // will be overwritten anyways, just here for types sake + loading: false, + data: diff.result, + dataState: + fetchMoreResult.dataState === "streaming" ? + "streaming" + : "complete", + }, + }); + } + }, + }); + } else { + // There is a possibility `lastResult` may not be set when + // `fetchMore` is called which would cause this to crash. This should + // only happen if we haven't previously reported a result. We don't + // quite know what the right behavior should be here since this block + // of code runs after the fetch result has executed on the network. + // We plan to let it crash in the meantime. + // + // If we get bug reports due to the `data` property access on + // undefined, this should give us a real-world scenario that we can + // use to test against and determine the right behavior. If we do end + // up changing this behavior, this may require, for example, an + // adjustment to the types on `updateQuery` since that function + // expects that the first argument always contains previous result + // data, but not `undefined`. + const lastResult = this.getCurrentResult(); + const data = updateQuery!(lastResult.data as Unmasked, { + fetchMoreResult: fetchMoreResult.data as Unmasked, + variables: combinedOptions.variables as TFetchVars, + }); - pushNotification({ - kind: "N", - value: { - ...lastResult, - networkStatus: NetworkStatus.ready, - // will be overwritten anyways, just here for types sake - loading: false, - data: data as any, - dataState: - lastResult.dataState === "streaming" ? "streaming" : "complete", - }, - source: "network", - }); - } - }, - }); + pushNotification({ + kind: "N", + value: { + ...lastResult, + networkStatus: NetworkStatus.ready, + // will be overwritten anyways, just here for types sake + loading: false, + data: data as any, + dataState: + lastResult.dataState === "streaming" ? 
+ "streaming" + : "complete", + }, + source: "network", + }); + } + }, + }); return preventUnhandledRejection( promise From 23a21d8c858e7d967aaf4c2fca9fae28bb262226 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 16 Sep 2025 18:22:30 -0600 Subject: [PATCH 240/254] Update api report --- .api-reports/api-report-core.api.md | 2 +- .api-reports/api-report.api.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.api-reports/api-report-core.api.md b/.api-reports/api-report-core.api.md index c1f15c36f34..132e1fd8b74 100644 --- a/.api-reports/api-report-core.api.md +++ b/.api-reports/api-report-core.api.md @@ -1133,7 +1133,7 @@ export type WatchQueryOptions Date: Tue, 16 Sep 2025 18:28:57 -0600 Subject: [PATCH 241/254] Add private get for cache in ObservableQuery --- src/core/ObservableQuery.ts | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/src/core/ObservableQuery.ts b/src/core/ObservableQuery.ts index b7114bfe9f3..8eded44e88c 100644 --- a/src/core/ObservableQuery.ts +++ b/src/core/ObservableQuery.ts @@ -356,6 +356,10 @@ export class ObservableQuery< return this.subject.getValue().result.networkStatus; } + private get cache() { + return this.queryManager.cache; + } + constructor({ queryManager, options, @@ -559,7 +563,7 @@ export class ObservableQuery< * @internal */ public getCacheDiff({ optimistic = true } = {}) { - return this.queryManager.cache.diff({ + return this.cache.diff({ query: this.query, variables: this.variables, returnPartialData: true, @@ -694,7 +698,7 @@ export class ObservableQuery< } }, }; - const cancelWatch = this.queryManager.cache.watch(watch); + const cancelWatch = this.cache.watch(watch); this.unsubscribeFromCache = Object.assign( () => { @@ -937,7 +941,7 @@ Did you mean to call refetch(variables) instead of refetch({ variables })?`, // updates. Most watchers will be using the QueryInfo class, which // responds to notifications by calling reobserveCacheFirst to deliver // fetchMore cache results back to this ObservableQuery. - this.queryManager.cache.batch({ + this.cache.batch({ update: (cache) => { if (updateQuery) { cache.updateQuery( @@ -1202,7 +1206,7 @@ Did you mean to call refetch(variables) instead of refetch({ variables })?`, ); if (newResult) { - queryManager.cache.writeQuery({ + this.cache.writeQuery({ query: this.options.query, data: newResult, variables: this.variables, From dfc1670c91d907af650c9cb8b0d2f51515c7919f Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 16 Sep 2025 19:02:46 -0600 Subject: [PATCH 242/254] Let cache watch handle final result --- src/core/ObservableQuery.ts | 44 ++++++++++++++++++++++--------------- 1 file changed, 26 insertions(+), 18 deletions(-) diff --git a/src/core/ObservableQuery.ts b/src/core/ObservableQuery.ts index 8eded44e88c..2a16988ac9b 100644 --- a/src/core/ObservableQuery.ts +++ b/src/core/ObservableQuery.ts @@ -975,24 +975,32 @@ Did you mean to call refetch(variables) instead of refetch({ variables })?`, if (watch.watcher === this) { wasUpdated = true; const lastResult = this.getCurrentResult(); - pushNotification({ - kind: "N", - source: "network", - value: { - ...lastResult, - networkStatus: - fetchMoreResult.networkStatus === NetworkStatus.error ? - NetworkStatus.ready - : fetchMoreResult.networkStatus, - // will be overwritten anyways, just here for types sake - loading: false, - data: diff.result, - dataState: - fetchMoreResult.dataState === "streaming" ? 
- "streaming" - : "complete", - }, - }); + + // Let the cache watch from resubscribeCache handle the final + // result + if (isNetworkRequestInFlight(fetchMoreResult.networkStatus)) { + pushNotification({ + kind: "N", + source: "network", + value: { + ...lastResult, + networkStatus: + ( + fetchMoreResult.networkStatus === + NetworkStatus.error + ) ? + NetworkStatus.ready + : fetchMoreResult.networkStatus, + // will be overwritten anyways, just here for types sake + loading: false, + data: diff.result, + dataState: + fetchMoreResult.dataState === "streaming" ? + "streaming" + : "complete", + }, + }); + } } }, }); From 6ddb8f86e85c3fce30f91cb5eb2693dd649698a9 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Tue, 16 Sep 2025 19:06:34 -0600 Subject: [PATCH 243/254] Update api report --- .api-reports/api-report-core.api.md | 2 +- .api-reports/api-report.api.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.api-reports/api-report-core.api.md b/.api-reports/api-report-core.api.md index 132e1fd8b74..b1eb50951ef 100644 --- a/.api-reports/api-report-core.api.md +++ b/.api-reports/api-report-core.api.md @@ -1133,7 +1133,7 @@ export type WatchQueryOptions Date: Wed, 17 Sep 2025 15:20:22 -0600 Subject: [PATCH 244/254] Version Packages (alpha) (#12928) Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> Co-authored-by: Jerel Miller --- .changeset/pre.json | 8 +++++++- .size-limits.json | 8 ++++---- CHANGELOG.md | 31 +++++++++++++++++++++++++++++++ package-lock.json | 4 ++-- package.json | 2 +- 5 files changed, 45 insertions(+), 8 deletions(-) diff --git a/.changeset/pre.json b/.changeset/pre.json index 84d911ba5c1..562f7477ac3 100644 --- a/.changeset/pre.json +++ b/.changeset/pre.json @@ -6,5 +6,11 @@ "@apollo/client-graphql-codegen": "1.0.0", "@apollo/client-codemod-migrate-3-to-4": "1.0.2" }, - "changesets": [] + "changesets": [ + "cold-kiwis-give", + "funny-bats-hammer", + "little-yaks-decide", + "neat-lemons-shave", + "six-islands-drum" + ] } diff --git a/.size-limits.json b/.size-limits.json index e984d952174..34b4dcda15a 100644 --- a/.size-limits.json +++ b/.size-limits.json @@ -1,6 +1,6 @@ { - "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (CJS)": 44530, - "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production) (CJS)": 39381, - "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\"": 33704, - "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production)": 27701 + "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (CJS)": 44574, + "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production) (CJS)": 39359, + "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\"": 33705, + "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production)": 27700 } diff --git a/CHANGELOG.md b/CHANGELOG.md index 0711d36e851..70afc0590a8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,36 @@ # @apollo/client +## 4.1.0-alpha.0 + +### Minor Changes + +- [#12923](https://github.com/apollographql/apollo-client/pull/12923) [`2aa31c7`](https://github.com/apollographql/apollo-client/commit/2aa31c718155e88814551afb14fd7a0035acc57d) Thanks [@jerelmiller](https://github.com/jerelmiller)! - Fix an issue where deferred payloads that reteurned arrays with fewer items than the original cached array would retain items from the cached array. 
This change includes `@stream` arrays where stream arrays replace the cached arrays. + +- [#12926](https://github.com/apollographql/apollo-client/pull/12926) [`c7fba99`](https://github.com/apollographql/apollo-client/commit/c7fba99e16da522fdbc35b9c16cdb8df0dda4c2c) Thanks [@jerelmiller](https://github.com/jerelmiller)! - Support the newer incremental delivery format for the `@defer` directive implemented in `graphql@17.0.0-alpha.9`. Import the `GraphQL17Alpha9Handler` to use the newer incremental delivery format with `@defer`. + + ```ts + import { GraphQL17Alpha9Handler } from "@apollo/client/incremental"; + + const client = new ApolloClient({ + // ... + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + ``` + + > [!NOTE] + > In order to use the `GraphQL17Alpha9Handler`, the GraphQL server MUST implement the newer incremental delivery format. You may see errors or unusual behavior if you use the wrong handler. If you are using Apollo Router, continue to use the `Defer20220824Handler` because Apollo Router does not yet support the newer incremental delivery format. + +- [#12918](https://github.com/apollographql/apollo-client/pull/12918) [`562e219`](https://github.com/apollographql/apollo-client/commit/562e2191a4b38e05edb3da9074e2958db3c7b6b9) Thanks [@jerelmiller](https://github.com/jerelmiller)! - Add support for the `@stream` directive on both the `Defer20220824Handler` and the `GraphQL17Alpha2Handler`. + + > [!NOTE] + > The implementations of `@stream` differ in the delivery of incremental results between the different GraphQL spec versions. If you upgrading from the older format to the newer format, expect the timing of some incremental results to change. + +### Patch Changes + +- [#12925](https://github.com/apollographql/apollo-client/pull/12925) [`f538a83`](https://github.com/apollographql/apollo-client/commit/f538a83621e1d110286c056dd8e91611dfd9a1d3) Thanks [@jerelmiller](https://github.com/jerelmiller)! - Fix an issue where calling `fetchMore` with `@defer` or `@stream` would not rerender incremental results as they were streamed. + +- [#12923](https://github.com/apollographql/apollo-client/pull/12923) [`01cace0`](https://github.com/apollographql/apollo-client/commit/01cace0a6d4faf79e8a4188b93c7d13c4b26d6d4) Thanks [@jerelmiller](https://github.com/jerelmiller)! - Improve the cache data loss warning message when `existing` or `incoming` is an array. 
+ ## 4.0.4 ### Patch Changes diff --git a/package-lock.json b/package-lock.json index 18dbd11a0c4..02d8b0bd9b3 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@apollo/client", - "version": "4.0.4", + "version": "4.1.0-alpha.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "@apollo/client", - "version": "4.0.4", + "version": "4.1.0-alpha.0", "hasInstallScript": true, "license": "MIT", "workspaces": [ diff --git a/package.json b/package.json index 18c08e9fdca..d8d7ef315d7 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@apollo/client", - "version": "4.0.4", + "version": "4.1.0-alpha.0", "description": "A fully-featured caching GraphQL client.", "private": true, "keywords": [ From c97b145188d39d754ff098ff399a80cae5b10cc0 Mon Sep 17 00:00:00 2001 From: Lenz Weber-Tronic Date: Wed, 24 Sep 2025 18:48:07 +0200 Subject: [PATCH 245/254] Create mechanism to add experimental features to Apollo Client (#12915) * Create mechanism to add experimental features to Apollo Client * add comment, v1 * Clean up Prettier, Size-limit, and Api-Extractor --------- Co-authored-by: phryneas <4282439+phryneas@users.noreply.github.com> --- .api-reports/api-report-core.api.md | 10 +++++++++- .api-reports/api-report.api.md | 12 ++++++++++-- .changeset/olive-queens-fold.md | 5 +++++ .size-limits.json | 8 ++++---- src/core/ApolloClient.ts | 17 +++++++++++++++++ 5 files changed, 45 insertions(+), 7 deletions(-) create mode 100644 .changeset/olive-queens-fold.md diff --git a/.api-reports/api-report-core.api.md b/.api-reports/api-report-core.api.md index b1eb50951ef..ab7effb8971 100644 --- a/.api-reports/api-report-core.api.md +++ b/.api-reports/api-report-core.api.md @@ -195,6 +195,13 @@ export namespace ApolloClient { } } // (undocumented) + export interface Experiment { + // (undocumented) + (this: ApolloClient, options: ApolloClient.Options): void; + // (undocumented) + v: 1; + } + // (undocumented) export type MutateOptions = { optimisticResponse?: Unmasked> | ((vars: TVariables, { IGNORE }: { IGNORE: IgnoreModifier; @@ -231,6 +238,7 @@ export namespace ApolloClient { documentTransform?: DocumentTransform; // (undocumented) enhancedClientAwareness?: ClientAwarenessLink.EnhancedClientAwarenessOptions; + experiments?: ApolloClient.Experiment[]; incrementalHandler?: Incremental.Handler; link: ApolloLink; // (undocumented) @@ -1132,7 +1140,7 @@ export type WatchQueryOptions; link: ApolloLink; @@ -2711,8 +2719,8 @@ interface WriteContext extends ReadMergeModifyContext { // src/cache/inmemory/policies.ts:167:3 - (ae-forgotten-export) The symbol "KeySpecifier" needs to be exported by the entry point index.d.ts // src/cache/inmemory/policies.ts:167:3 - (ae-forgotten-export) The symbol "KeyArgsFunction" needs to be exported by the entry point index.d.ts // src/cache/inmemory/types.ts:134:3 - (ae-forgotten-export) The symbol "KeyFieldsFunction" needs to be exported by the entry point index.d.ts -// src/core/ApolloClient.ts:159:5 - (ae-forgotten-export) The symbol "IgnoreModifier" needs to be exported by the entry point index.d.ts -// src/core/ApolloClient.ts:353:5 - (ae-forgotten-export) The symbol "NextFetchPolicyContext" needs to be exported by the entry point index.d.ts +// src/core/ApolloClient.ts:168:5 - (ae-forgotten-export) The symbol "IgnoreModifier" needs to be exported by the entry point index.d.ts +// src/core/ApolloClient.ts:362:5 - (ae-forgotten-export) The symbol "NextFetchPolicyContext" needs to be exported by the entry point 
index.d.ts // src/core/ObservableQuery.ts:368:5 - (ae-forgotten-export) The symbol "QueryManager" needs to be exported by the entry point index.d.ts // src/core/QueryManager.ts:180:5 - (ae-forgotten-export) The symbol "MutationStoreValue" needs to be exported by the entry point index.d.ts // src/local-state/LocalState.ts:147:5 - (ae-forgotten-export) The symbol "LocalState" needs to be exported by the entry point index.d.ts diff --git a/.changeset/olive-queens-fold.md b/.changeset/olive-queens-fold.md new file mode 100644 index 00000000000..ea3abc75a0c --- /dev/null +++ b/.changeset/olive-queens-fold.md @@ -0,0 +1,5 @@ +--- +"@apollo/client": patch +--- + +Create mechanism to add experimental features to Apollo Client diff --git a/.size-limits.json b/.size-limits.json index 34b4dcda15a..5dd163c1321 100644 --- a/.size-limits.json +++ b/.size-limits.json @@ -1,6 +1,6 @@ { - "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (CJS)": 44574, - "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production) (CJS)": 39359, - "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\"": 33705, - "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production)": 27700 + "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (CJS)": 44542, + "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production) (CJS)": 39461, + "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\"": 33696, + "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production)": 27707 } diff --git a/src/core/ApolloClient.ts b/src/core/ApolloClient.ts index 730012610bd..0089eb2b32e 100644 --- a/src/core/ApolloClient.ts +++ b/src/core/ApolloClient.ts @@ -132,6 +132,15 @@ export declare namespace ApolloClient { * queries. */ incrementalHandler?: Incremental.Handler; + + /** + * @experimental + * Allows passing in "experiments", experimental features that might one day + * become part of Apollo Client's core functionality. + * Keep in mind that these features might change the core of Apollo Client. + * Do not pass in experiments that are not provided by Apollo. 
+ */ + experiments?: ApolloClient.Experiment[]; } interface DevtoolsOptions { @@ -610,6 +619,11 @@ export declare namespace ApolloClient { variables?: TVariables; } } + + export interface Experiment { + (this: ApolloClient, options: ApolloClient.Options): void; + v: 1; + } } /** @@ -708,6 +722,7 @@ export class ApolloClient { dataMasking, link, incrementalHandler = new NotImplementedHandler(), + experiments = [], } = options; this.link = link; @@ -759,6 +774,8 @@ export class ApolloClient { } if (this.devtoolsConfig.enabled) this.connectToDevTools(); + + experiments.forEach((experiment) => experiment.call(this, options)); } private connectToDevTools() { From 54ab6d994692dad9f06d3d0b84c84d021d126577 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Fri, 26 Sep 2025 08:57:28 -0600 Subject: [PATCH 246/254] More robust handling of local state default value when read function is defined (#12934) --- .api-reports/api-report-cache.api.md | 3 + .api-reports/api-report-local-state.api.md | 4 +- .api-reports/api-report.api.md | 12 +- .changeset/flat-worms-notice.md | 7 + .changeset/perfect-crabs-smile.md | 5 + .changeset/shaggy-islands-yell.md | 5 + .changeset/unlucky-cooks-rhyme.md | 7 + .size-limits.json | 8 +- src/__tests__/local-state/general.ts | 155 ++++++++++++- src/cache/core/cache.ts | 22 ++ src/cache/inmemory/inMemoryCache.ts | 4 + src/core/QueryManager.ts | 12 +- src/local-state/LocalState.ts | 41 +++- .../__tests__/LocalState/aliases.test.ts | 6 + .../__tests__/LocalState/async.test.ts | 3 + .../__tests__/LocalState/base.test.ts | 207 +++++++++++++++++- .../__tests__/LocalState/cache.test.ts | 39 +++- .../__tests__/LocalState/context.test.ts | 5 + .../__tests__/LocalState/errors.test.ts | 12 + .../LocalState/forcedResolvers.test.ts | 5 + .../__tests__/LocalState/fragments.test.ts | 6 + .../__tests__/LocalState/partialData.test.ts | 2 + .../__tests__/LocalState/rootValue.test.ts | 2 + .../LocalState/subscriptions.test.ts | 3 + .../__tests__/LocalState/testUtils.ts | 7 + 25 files changed, 554 insertions(+), 28 deletions(-) create mode 100644 .changeset/flat-worms-notice.md create mode 100644 .changeset/perfect-crabs-smile.md create mode 100644 .changeset/shaggy-islands-yell.md create mode 100644 .changeset/unlucky-cooks-rhyme.md diff --git a/.api-reports/api-report-cache.api.md b/.api-reports/api-report-cache.api.md index 0e7551ac5dc..3ff92332c84 100644 --- a/.api-reports/api-report-cache.api.md +++ b/.api-reports/api-report-cache.api.md @@ -94,6 +94,7 @@ export abstract class ApolloCache { abstract removeOptimistic(id: string): void; // (undocumented) abstract reset(options?: Cache_2.ResetOptions): Promise; + resolvesClientField?(typename: string, fieldName: string): boolean; abstract restore(serializedState: unknown): this; // (undocumented) transformDocument(document: DocumentNode): DocumentNode; @@ -544,6 +545,8 @@ export class InMemoryCache extends ApolloCache { // (undocumented) reset(options?: Cache_2.ResetOptions): Promise; // (undocumented) + resolvesClientField(typename: string, fieldName: string): boolean; + // (undocumented) restore(data: NormalizedCacheObject): this; // (undocumented) retain(rootId: string, optimistic?: boolean): number; diff --git a/.api-reports/api-report-local-state.api.md b/.api-reports/api-report-local-state.api.md index 747567da303..4a633e1a00f 100644 --- a/.api-reports/api-report-local-state.api.md +++ b/.api-reports/api-report-local-state.api.md @@ -14,6 +14,7 @@ import type { NoInfer as NoInfer_2 } from '@apollo/client/utilities/internal'; import type 
{ OperationVariables } from '@apollo/client'; import type { RemoveIndexSignature } from '@apollo/client/utilities/internal'; import type { TypedDocumentNode } from '@apollo/client'; +import type { WatchQueryFetchPolicy } from '@apollo/client'; // @public (undocumented) type InferContextValueFromResolvers = TResolvers extends { @@ -91,7 +92,7 @@ export class LocalState({ document, client, context, remoteResult, variables, onlyRunForcedResolvers, returnPartialData, }: { + execute({ document, client, context, remoteResult, variables, onlyRunForcedResolvers, returnPartialData, fetchPolicy, }: { document: DocumentNode | TypedDocumentNode; client: ApolloClient; context: DefaultContext | undefined; @@ -99,6 +100,7 @@ export class LocalState>; // (undocumented) getExportedVariables({ document, client, context, variables, }: { diff --git a/.api-reports/api-report.api.md b/.api-reports/api-report.api.md index a00a78bf4b1..ddcaa569a04 100644 --- a/.api-reports/api-report.api.md +++ b/.api-reports/api-report.api.md @@ -95,6 +95,7 @@ export abstract class ApolloCache { abstract removeOptimistic(id: string): void; // (undocumented) abstract reset(options?: Cache_2.ResetOptions): Promise; + resolvesClientField?(typename: string, fieldName: string): boolean; abstract restore(serializedState: unknown): this; // (undocumented) transformDocument(document: DocumentNode): DocumentNode; @@ -1381,6 +1382,8 @@ export class InMemoryCache extends ApolloCache { // (undocumented) reset(options?: Cache_2.ResetOptions): Promise; // (undocumented) + resolvesClientField(typename: string, fieldName: string): boolean; + // (undocumented) restore(data: NormalizedCacheObject): this; // (undocumented) retain(rootId: string, optimistic?: boolean): number; @@ -1577,7 +1580,7 @@ class LocalState({ document, client, context, remoteResult, variables, onlyRunForcedResolvers, returnPartialData, }: { + execute({ document, client, context, remoteResult, variables, onlyRunForcedResolvers, returnPartialData, fetchPolicy, }: { document: DocumentNode | TypedDocumentNode; client: ApolloClient; context: DefaultContext | undefined; @@ -1585,6 +1588,7 @@ class LocalState>; // (undocumented) getExportedVariables({ document, client, context, variables, }: { @@ -2723,9 +2727,9 @@ interface WriteContext extends ReadMergeModifyContext { // src/core/ApolloClient.ts:362:5 - (ae-forgotten-export) The symbol "NextFetchPolicyContext" needs to be exported by the entry point index.d.ts // src/core/ObservableQuery.ts:368:5 - (ae-forgotten-export) The symbol "QueryManager" needs to be exported by the entry point index.d.ts // src/core/QueryManager.ts:180:5 - (ae-forgotten-export) The symbol "MutationStoreValue" needs to be exported by the entry point index.d.ts -// src/local-state/LocalState.ts:147:5 - (ae-forgotten-export) The symbol "LocalState" needs to be exported by the entry point index.d.ts -// src/local-state/LocalState.ts:200:7 - (ae-forgotten-export) The symbol "LocalState" needs to be exported by the entry point index.d.ts -// src/local-state/LocalState.ts:243:7 - (ae-forgotten-export) The symbol "LocalState" needs to be exported by the entry point index.d.ts +// src/local-state/LocalState.ts:149:5 - (ae-forgotten-export) The symbol "LocalState" needs to be exported by the entry point index.d.ts +// src/local-state/LocalState.ts:202:7 - (ae-forgotten-export) The symbol "LocalState" needs to be exported by the entry point index.d.ts +// src/local-state/LocalState.ts:245:7 - (ae-forgotten-export) The symbol "LocalState" needs to be exported by the 
entry point index.d.ts // (No @packageDocumentation comment for this package) diff --git a/.changeset/flat-worms-notice.md b/.changeset/flat-worms-notice.md new file mode 100644 index 00000000000..6833bc32800 --- /dev/null +++ b/.changeset/flat-worms-notice.md @@ -0,0 +1,7 @@ +--- +"@apollo/client": minor +--- + +Don't set the fallback value of a `@client` field to `null` when a `read` function is defined. Instead the `read` function will be called with an `existing` value of `undefined` to allow default arguments to be used to set the returned value. + +When a `read` function is not defined nor is there a defined resolver for the field, warn and set the value to `null` only in that instance. diff --git a/.changeset/perfect-crabs-smile.md b/.changeset/perfect-crabs-smile.md new file mode 100644 index 00000000000..d85e58e885f --- /dev/null +++ b/.changeset/perfect-crabs-smile.md @@ -0,0 +1,5 @@ +--- +"@apollo/client": patch +--- + +Ensure `LocalState` doesn't try to read from the cache when using a `no-cache` fetch policy. diff --git a/.changeset/shaggy-islands-yell.md b/.changeset/shaggy-islands-yell.md new file mode 100644 index 00000000000..0056f773493 --- /dev/null +++ b/.changeset/shaggy-islands-yell.md @@ -0,0 +1,5 @@ +--- +"@apollo/client": patch +--- + +Warn when using a `no-cache` fetch policy without a local resolver defined. `no-cache` queries do not read or write to the cache which meant `no-cache` queries are silently incomplete when the `@client` field value was handled by a cache `read` function. diff --git a/.changeset/unlucky-cooks-rhyme.md b/.changeset/unlucky-cooks-rhyme.md new file mode 100644 index 00000000000..c73a23c930c --- /dev/null +++ b/.changeset/unlucky-cooks-rhyme.md @@ -0,0 +1,7 @@ +--- +"@apollo/client": minor +--- + +Add an abstract `resolvesClientField` function to `ApolloCache` that can be used by caches to tell `LocalState` if it can resolve a `@client` field when a local resolver is not defined. + +`LocalState` will emit a warning and set a fallback value of `null` when no local resolver is defined and `resolvesClientField` returns `false`, or isn't defined. Returning `true` from `resolvesClientField` signals that a mechanism in the cache will set the field value. In this case, `LocalState` won't set the field value. 
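Taken together, the changesets above change how a `@client` field gets its value when no local resolver exists. A small sketch of the new behavior, borrowing the `User.firstName` fixture from the tests later in this commit (the endpoint URI is made up and the entry point paths are assumed from the rest of this series):

```ts
import { ApolloClient, HttpLink, InMemoryCache, gql } from "@apollo/client";
import { LocalState } from "@apollo/client/local-state";

const cache = new InMemoryCache({
  typePolicies: {
    User: {
      fields: {
        firstName: {
          // `existing` is now `undefined` (not `null`) when nothing has been
          // written yet, so a default parameter can supply the fallback.
          read: (existing = "Fallback") => existing,
        },
      },
    },
  },
});

const client = new ApolloClient({
  cache,
  link: new HttpLink({ uri: "https://example.com/graphql" }),
  localState: new LocalState(),
});

const query = gql`
  query GetUser {
    user {
      firstName @client
      lastName
    }
  }
`;

// With cache-first or network-only, LocalState has no resolver for
// `User.firstName`, asks the cache via `resolvesClientField`, and leaves the
// value to the `read` function above, which returns "Fallback".
void client.query({ query });

// With no-cache the cache is skipped entirely, so the field falls back to
// `null` and the new NO_CACHE warning is logged instead.
void client.query({ query, fetchPolicy: "no-cache" });
```

`InMemoryCache.resolvesClientField` in this patch simply reports whether a `read` function is registered for the field, which is why the field policy alone is enough to resolve the `@client` field here.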
diff --git a/.size-limits.json b/.size-limits.json index 5dd163c1321..7bfe2604146 100644 --- a/.size-limits.json +++ b/.size-limits.json @@ -1,6 +1,6 @@ { - "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (CJS)": 44542, - "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production) (CJS)": 39461, - "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\"": 33696, - "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production)": 27707 + "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (CJS)": 44753, + "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production) (CJS)": 39420, + "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\"": 33901, + "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production)": 27727 } diff --git a/src/__tests__/local-state/general.ts b/src/__tests__/local-state/general.ts index 69281772d3b..6771eab00c5 100644 --- a/src/__tests__/local-state/general.ts +++ b/src/__tests__/local-state/general.ts @@ -26,6 +26,13 @@ import { } from "@apollo/client/testing/internal"; import { InvariantError } from "@apollo/client/utilities/invariant"; +const WARNINGS = { + MISSING_RESOLVER: + "Could not find a resolver for the '%s' field nor does the cache resolve the field. The field value has been set to `null`. Either define a resolver for the field or ensure the cache can resolve the value, for example, by adding a 'read' function to a field policy in 'InMemoryCache'.", + NO_CACHE: + "The '%s' field resolves the value from the cache, for example from a 'read' function, but a 'no-cache' fetch policy was used. The field value has been set to `null`. Either define a local resolver or use a fetch policy that uses the cache to ensure the field is resolved correctly.", +}; + describe("General functionality", () => { test("should not impact normal non-@client use", async () => { const query = gql` @@ -632,7 +639,7 @@ describe("Cache manipulation", () => { }); expect(read).toHaveBeenCalledTimes(1); - expect(read).toHaveBeenCalledWith(null, expect.anything()); + expect(read).toHaveBeenCalledWith(undefined, expect.anything()); expect(console.warn).not.toHaveBeenCalled(); }); }); @@ -1510,3 +1517,149 @@ test("throws when executing subscriptions with client fields when local state is ) ); }); + +test.each(["cache-first", "network-only"] as const)( + "sets existing value of `@client` field to undefined when read function is present", + async (fetchPolicy) => { + const query = gql` + query GetUser { + user { + firstName @client + lastName + } + } + `; + + const read = jest.fn((value = "Fallback") => value); + const client = new ApolloClient({ + cache: new InMemoryCache({ + typePolicies: { + User: { + fields: { + firstName: { + read, + }, + }, + }, + }, + }), + link: new ApolloLink(() => { + return of({ + data: { user: { __typename: "User", lastName: "Smith" } }, + }).pipe(delay(10)); + }), + localState: new LocalState(), + }); + + await expect( + client.query({ query, fetchPolicy }) + ).resolves.toStrictEqualTyped({ + data: { + user: { __typename: "User", firstName: "Fallback", lastName: "Smith" }, + }, + }); + + expect(read).toHaveBeenCalledTimes(1); + expect(read).toHaveBeenCalledWith(undefined, expect.anything()); + } +); + +test("sets existing value of `@client` field to null and warns when using no-cache with read function", async () => { + using _ = spyOnConsole("warn"); + const query = gql` + query 
GetUser { + user { + firstName @client + lastName + } + } + `; + + const read = jest.fn((value) => value ?? "Fallback"); + const client = new ApolloClient({ + cache: new InMemoryCache({ + typePolicies: { + User: { + fields: { + firstName: { + read, + }, + }, + }, + }, + }), + link: new ApolloLink(() => { + return of({ + data: { user: { __typename: "User", lastName: "Smith" } }, + }).pipe(delay(10)); + }), + localState: new LocalState(), + }); + + await expect( + client.query({ query, fetchPolicy: "no-cache" }) + ).resolves.toStrictEqualTyped({ + data: { + user: { __typename: "User", firstName: null, lastName: "Smith" }, + }, + }); + + expect(read).not.toHaveBeenCalled(); + expect(console.warn).toHaveBeenCalledTimes(1); + expect(console.warn).toHaveBeenCalledWith( + WARNINGS.NO_CACHE, + "User.firstName" + ); +}); + +test("sets existing value of `@client` field to null and warns when merge function but not read function is present", async () => { + using _ = spyOnConsole("warn"); + const query = gql` + query GetUser { + user { + firstName @client + lastName + } + } + `; + + const merge = jest.fn(() => "Fallback"); + const client = new ApolloClient({ + cache: new InMemoryCache({ + typePolicies: { + User: { + fields: { + firstName: { + merge, + }, + }, + }, + }, + }), + link: new ApolloLink(() => { + return of({ + data: { user: { __typename: "User", lastName: "Smith" } }, + }).pipe(delay(10)); + }), + localState: new LocalState(), + }); + + await expect(client.query({ query })).resolves.toStrictEqualTyped({ + data: { + user: { + __typename: "User", + firstName: "Fallback", + lastName: "Smith", + }, + }, + }); + + expect(merge).toHaveBeenCalledTimes(1); + expect(merge).toHaveBeenCalledWith(undefined, null, expect.anything()); + + expect(console.warn).toHaveBeenCalledTimes(1); + expect(console.warn).toHaveBeenCalledWith( + WARNINGS.MISSING_RESOLVER, + "User.firstName" + ); +}); diff --git a/src/cache/core/cache.ts b/src/cache/core/cache.ts index ac80f48f6cd..3e98626633d 100644 --- a/src/cache/core/cache.ts +++ b/src/cache/core/cache.ts @@ -178,6 +178,28 @@ export abstract class ApolloCache { return null; } + // Local state API + + /** + * Determines whether a `@client` field can be resolved by the cache. Used + * when `LocalState` does not have a local resolver that can resolve the + * field. + * + * @remarks Cache implementations should return `true` if a mechanism in the + * cache is expected to provide a value for the field. `LocalState` will set + * the value of the field to `undefined` in order for the cache to handle it. + * + * Cache implementations should return `false` to indicate that it cannot + * handle resolving the field (either because it doesn't have a mechanism to + * do so, or because the user hasn't provided enough information to resolve + * the field). Returning `false` will emit a warning and set the value of the + * field to `null`. + * + * A cache that doesn't implement `resolvesClientField` will be treated the + * same as returning `false`. 
+ */ + public resolvesClientField?(typename: string, fieldName: string): boolean; + // Transactional API // The batch method is intended to replace/subsume both performTransaction diff --git a/src/cache/inmemory/inMemoryCache.ts b/src/cache/inmemory/inMemoryCache.ts index f7d90e1169e..102ee0b2f51 100644 --- a/src/cache/inmemory/inMemoryCache.ts +++ b/src/cache/inmemory/inMemoryCache.ts @@ -530,6 +530,10 @@ export class InMemoryCache extends ApolloCache { return this.config.fragments?.lookup(fragmentName) || null; } + public resolvesClientField(typename: string, fieldName: string): boolean { + return !!this.policies.getReadFunction(typename, fieldName); + } + protected broadcastWatches(options?: BroadcastOptions) { if (!this.txCount) { this.watches.forEach((c) => this.maybeBroadcastWatch(c, options)); diff --git a/src/core/QueryManager.ts b/src/core/QueryManager.ts index 7b239329eb4..83a2bcccc39 100644 --- a/src/core/QueryManager.ts +++ b/src/core/QueryManager.ts @@ -331,6 +331,7 @@ export class QueryManager { optimisticResponse: isOptimistic ? optimisticResponse : void 0, }, variables, + fetchPolicy, {}, false ) @@ -748,7 +749,7 @@ export class QueryManager { ): SubscriptionObservable> { let { query, variables } = options; const { - fetchPolicy, + fetchPolicy = "cache-first", errorPolicy = "none", context = {}, extensions = {}, @@ -785,6 +786,7 @@ export class QueryManager { query, context, variables, + fetchPolicy, extensions ); @@ -864,7 +866,8 @@ export class QueryManager { private getObservableFromLink( query: DocumentNode, context: DefaultContext | undefined, - variables?: OperationVariables, + variables: OperationVariables, + fetchPolicy: WatchQueryFetchPolicy, extensions?: Record, // Prefer context.queryDeduplication if specified. deduplication: boolean = context?.queryDeduplication ?? 
@@ -994,6 +997,7 @@ export class QueryManager { remoteResult: result as FormattedExecutionResult, context, variables, + fetchPolicy, }) ); }) @@ -1041,7 +1045,8 @@ export class QueryManager { return this.getObservableFromLink( linkDocument, options.context, - options.variables + options.variables, + options.fetchPolicy ).observable.pipe( map((incoming) => { // Use linkDocument rather than queryInfo.document so the @@ -1602,6 +1607,7 @@ export class QueryManager { variables, onlyRunForcedResolvers: true, returnPartialData: true, + fetchPolicy, }).then( (resolved): QueryNotification.FromCache => ({ kind: "N", diff --git a/src/local-state/LocalState.ts b/src/local-state/LocalState.ts index f25c7c9c873..49601ba59ec 100644 --- a/src/local-state/LocalState.ts +++ b/src/local-state/LocalState.ts @@ -22,6 +22,7 @@ import type { ErrorLike, OperationVariables, TypedDocumentNode, + WatchQueryFetchPolicy, } from "@apollo/client"; import { cacheSlot } from "@apollo/client/cache"; import { LocalStateError, toErrorLike } from "@apollo/client/errors"; @@ -63,6 +64,7 @@ interface ExecContext { exportedVariableDefs: Record; diff: Cache.DiffResult; returnPartialData: boolean; + fetchPolicy?: WatchQueryFetchPolicy; } /** @@ -336,6 +338,7 @@ export class LocalState< variables = {} as TVariables, onlyRunForcedResolvers = false, returnPartialData = false, + fetchPolicy, }: { document: DocumentNode | TypedDocumentNode; client: ApolloClient; @@ -345,6 +348,7 @@ export class LocalState< variables: TVariables | undefined; onlyRunForcedResolvers?: boolean; returnPartialData?: boolean; + fetchPolicy: WatchQueryFetchPolicy; }): Promise> { if (__DEV__) { invariant( @@ -372,12 +376,15 @@ export class LocalState< const rootValue = remoteResult ? remoteResult.data : {}; - const diff = client.cache.diff>({ - query: toQueryOperation(document), - variables, - returnPartialData: true, - optimistic: false, - }); + const diff: Cache.DiffResult> = + fetchPolicy === "no-cache" ? + { result: null, complete: false } + : client.cache.diff>({ + query: toQueryOperation(document), + variables, + returnPartialData: true, + optimistic: false, + }); const requestContext = { ...client.defaultContext, ...context }; const execContext: ExecContext = { @@ -401,6 +408,7 @@ export class LocalState< exportedVariableDefs, diff, returnPartialData, + fetchPolicy, }; const localResult = await this.resolveSelectionSet( @@ -676,9 +684,10 @@ export class LocalState< variables, operationDefinition, phase, - returnPartialData, onlyRunForcedResolvers, + fetchPolicy, } = execContext; + let { returnPartialData } = execContext; const isRootField = parentSelectionSet === operationDefinition.selectionSet; const fieldName = field.name.value; const typename = @@ -709,7 +718,25 @@ export class LocalState< return fieldFromCache; } + if (client.cache.resolvesClientField?.(typename, fieldName)) { + if (fetchPolicy === "no-cache") { + invariant.warn( + "The '%s' field resolves the value from the cache, for example from a 'read' function, but a 'no-cache' fetch policy was used. The field value has been set to `null`. Either define a local resolver or use a fetch policy that uses the cache to ensure the field is resolved correctly.", + resolverName + ); + return null; + } + + // assume the cache will handle returning the correct value + returnPartialData = true; + return; + } + if (!returnPartialData) { + invariant.warn( + "Could not find a resolver for the '%s' field nor does the cache resolve the field. The field value has been set to `null`. 
Either define a resolver for the field or ensure the cache can resolve the value, for example, by adding a 'read' function to a field policy in 'InMemoryCache'.", + resolverName + ); return null; } } diff --git a/src/local-state/__tests__/LocalState/aliases.test.ts b/src/local-state/__tests__/LocalState/aliases.test.ts index 197dcc48c78..b88b438c370 100644 --- a/src/local-state/__tests__/LocalState/aliases.test.ts +++ b/src/local-state/__tests__/LocalState/aliases.test.ts @@ -41,6 +41,7 @@ test("resolves @client fields mixed with aliased server fields", async () => { context: {}, remoteResult, variables: {}, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { @@ -81,6 +82,7 @@ test("resolves aliased @client fields", async () => { context: {}, variables: {}, remoteResult: undefined, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { fie: { bar: true, __typename: "Foo" } }, @@ -137,6 +139,7 @@ test("resolves deeply nested aliased @client fields", async () => { context: {}, remoteResult, variables: {}, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { @@ -190,6 +193,7 @@ test("respects aliases for *nested fields* on the @client-tagged node", async () context: {}, remoteResult, variables: {}, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { @@ -231,6 +235,7 @@ test("does not confuse fields aliased to each other", async () => { context: {}, variables: {}, remoteResult: undefined, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { @@ -269,6 +274,7 @@ test("does not confuse fields aliased to each other with boolean values", async context: {}, variables: {}, remoteResult: undefined, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { diff --git a/src/local-state/__tests__/LocalState/async.test.ts b/src/local-state/__tests__/LocalState/async.test.ts index 5d4ff21f0bf..a8079360b3f 100644 --- a/src/local-state/__tests__/LocalState/async.test.ts +++ b/src/local-state/__tests__/LocalState/async.test.ts @@ -33,6 +33,7 @@ test("supports async @client resolvers", async () => { context: {}, variables: {}, remoteResult: undefined, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { isLoggedIn: true }, @@ -130,6 +131,7 @@ test("handles nested asynchronous @client resolvers", async () => { context: {}, variables: { id: developerId }, remoteResult: undefined, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { @@ -201,6 +203,7 @@ test("supports async @client resolvers mixed with remotely resolved data", async context: {}, variables: {}, remoteResult, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { diff --git a/src/local-state/__tests__/LocalState/base.test.ts b/src/local-state/__tests__/LocalState/base.test.ts index cc985217082..6fbbb8377a3 100644 --- a/src/local-state/__tests__/LocalState/base.test.ts +++ b/src/local-state/__tests__/LocalState/base.test.ts @@ -4,7 +4,7 @@ import { LocalState } from "@apollo/client/local-state"; import { spyOnConsole } from "@apollo/client/testing/internal"; import { InvariantError } from "@apollo/client/utilities/invariant"; -import { gql } from "./testUtils.js"; +import { gql, WARNINGS } from "./testUtils.js"; test("runs resolvers for @client queries", async () => { const document = gql` @@ -34,6 +34,7 @@ test("runs resolvers for @client queries", async () => { context: {}, variables: {}, remoteResult: undefined, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ 
data: { foo: { __typename: "Foo", bar: true } }, @@ -69,6 +70,7 @@ test("can add resolvers after LocalState is instantiated", async () => { context: {}, variables: {}, remoteResult: undefined, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { foo: { __typename: "Foo", bar: true } }, @@ -108,6 +110,7 @@ test("handles queries with a mix of @client and server fields", async () => { context: {}, variables: {}, remoteResult, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { @@ -165,6 +168,7 @@ test("runs resolvers for deeply nested @client fields", async () => { context: {}, variables: {}, remoteResult, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { @@ -214,6 +218,7 @@ test("has access to query variables in @client resolvers", async () => { context: {}, variables: { id: 1 }, remoteResult: undefined, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { foo: { __typename: "Foo", bar: 1 } }, @@ -266,6 +271,7 @@ test("combines local @client resolver results with server results, for the same context: {}, variables: {}, remoteResult, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { @@ -309,6 +315,7 @@ test("handles resolvers that return booleans", async () => { context: {}, variables: {}, remoteResult: undefined, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { isInCart: false }, @@ -351,6 +358,7 @@ test("does not run resolvers without @client directive", async () => { context: {}, variables: {}, remoteResult, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { @@ -405,6 +413,7 @@ test("does not run resolvers without @client directive with nested field", async context: {}, variables: {}, remoteResult, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { @@ -462,6 +471,7 @@ test("allows child resolvers from a parent resolved field from a local resolver" context: {}, variables: {}, remoteResult: undefined, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { @@ -518,6 +528,7 @@ test("can use remote result to resolve @client field", async () => { context: {}, variables: {}, remoteResult, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { @@ -567,13 +578,14 @@ test("throws error when query does not contain client fields", async () => { context: {}, variables: {}, remoteResult, + fetchPolicy: "cache-first", }) ).rejects.toEqual( new InvariantError("Expected document to contain `@client` fields.") ); }); -test("does not warn when a resolver is missing for an `@client` field", async () => { +test("warns and sets value to null when a resolver is missing for an `@client` field and a read function is not defined when using InMemoryCache", async () => { using _ = spyOnConsole("warn"); const document = gql` query { @@ -595,13 +607,57 @@ test("does not warn when a resolver is missing for an `@client` field", async () context: {}, variables: {}, remoteResult: undefined, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { foo: null } }); + expect(console.warn).toHaveBeenCalledTimes(1); + expect(console.warn).toHaveBeenCalledWith( + WARNINGS.MISSING_RESOLVER, + "Query.foo" + ); +}); + +test("does not warn when read function is defined for a `@client` field when using InMemoryCache", async () => { + using _ = spyOnConsole("warn"); + const document = gql` + query { + foo @client + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache({ + typePolicies: { + 
Query: { + fields: { + foo: { + read: () => "Bar", + }, + }, + }, + }, + }), + link: ApolloLink.empty(), + }); + + const localState = new LocalState(); + + await expect( + localState.execute({ + document, + client, + context: {}, + variables: {}, + remoteResult: undefined, + fetchPolicy: "cache-first", + }) + ).resolves.toStrictEqualTyped({ data: { foo: "Bar" } }); + expect(console.warn).not.toHaveBeenCalled(); }); -test("does not warn for client child fields of a server field", async () => { +test("warns and sets value to null for client child fields of a server field with no resolver or read function", async () => { using _ = spyOnConsole("warn"); const document = gql` query { @@ -625,14 +681,149 @@ test("does not warn for client child fields of a server field", async () => { context: {}, variables: {}, remoteResult, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { foo: { __typename: "Foo", bar: null } }, }); + expect(console.warn).toHaveBeenCalledTimes(1); + expect(console.warn).toHaveBeenCalledWith( + WARNINGS.MISSING_RESOLVER, + "Foo.bar" + ); +}); + +test("does not warn when a read function is defined for a child `@client` field from a server field when using InMemoryCache", async () => { + using _ = spyOnConsole("warn"); + const document = gql` + query { + foo { + bar @client + } + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache({ + typePolicies: { + Foo: { + fields: { + bar: { + read: () => "Baz", + }, + }, + }, + }, + }), + link: ApolloLink.empty(), + }); + + const localState = new LocalState(); + + const remoteResult = { data: { foo: { __typename: "Foo" } } }; + await expect( + localState.execute({ + document, + client, + context: {}, + variables: {}, + remoteResult, + fetchPolicy: "cache-first", + }) + ).resolves.toStrictEqualTyped({ + // The `bar` field is not so that the cache can fill in the field from the + // read function. 
+ data: { foo: { __typename: "Foo" } }, + }); + expect(console.warn).not.toHaveBeenCalled(); }); +test("warns when using a no-cache query with a read function but no resolver function", async () => { + using _ = spyOnConsole("warn"); + const document = gql` + query { + foo @client + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache({ + typePolicies: { + Query: { + fields: { + foo: { + read: () => "bar", + }, + }, + }, + }, + }), + link: ApolloLink.empty(), + }); + + const localState = new LocalState(); + + await expect( + localState.execute({ + document, + client, + context: {}, + variables: {}, + remoteResult: undefined, + fetchPolicy: "no-cache", + }) + ).resolves.toStrictEqualTyped({ data: { foo: null } }); + + expect(console.warn).toHaveBeenCalledTimes(1); + expect(console.warn).toHaveBeenCalledWith(WARNINGS.NO_CACHE, "Query.foo"); +}); + +test("warns when using a no-cache query with a read function but no resolver function on child @client field", async () => { + using _ = spyOnConsole("warn"); + const document = gql` + query { + foo { + bar @client + } + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache({ + typePolicies: { + Foo: { + fields: { + bar: { + read: () => "baz", + }, + }, + }, + }, + }), + link: ApolloLink.empty(), + }); + + const localState = new LocalState(); + + await expect( + localState.execute({ + document, + client, + context: {}, + variables: {}, + remoteResult: { data: { foo: { __typename: "Foo" } } }, + fetchPolicy: "no-cache", + }) + ).resolves.toStrictEqualTyped({ + data: { foo: { __typename: "Foo", bar: null } }, + }); + + expect(console.warn).toHaveBeenCalledTimes(1); + expect(console.warn).toHaveBeenCalledWith(WARNINGS.NO_CACHE, "Foo.bar"); +}); + test("warns when a resolver returns undefined and sets value to null", async () => { using _ = spyOnConsole("warn"); const document = gql` @@ -661,6 +852,7 @@ test("warns when a resolver returns undefined and sets value to null", async () context: {}, variables: {}, remoteResult: undefined, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { foo: null } }); @@ -701,6 +893,7 @@ test("warns if a parent resolver omits a field with no child resolver", async () context: {}, variables: {}, remoteResult: undefined, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { foo: { __typename: "Foo", bar: true, baz: null } }, @@ -745,6 +938,7 @@ test("warns if a parent resolver omits a field and child has @client field", asy context: {}, variables: {}, remoteResult: undefined, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { foo: { __typename: "Foo", bar: true, baz: null } }, @@ -791,6 +985,7 @@ test("adds an error when the __typename cannot be resolved", async () => { context: {}, variables: {}, remoteResult: undefined, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { foo: null }, @@ -844,6 +1039,7 @@ test("can return more data than needed in resolver which is accessible by child context: {}, variables: {}, remoteResult: undefined, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { foo: { __typename: "Foo", bar: "random" } }, @@ -882,6 +1078,7 @@ test("does not execute child resolver when parent is null", async () => { context: {}, variables: {}, remoteResult, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { currentUser: null }, @@ -929,6 +1126,7 @@ test("does not execute root scalar resolver data when remote data returns null", context: {}, variables: 
{}, remoteResult, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: null, @@ -979,6 +1177,7 @@ test("does not run object resolver when remote data returns null", async () => { context: {}, variables: {}, remoteResult, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: null, @@ -1037,6 +1236,7 @@ test("does not run root resolvers when multiple client fields are defined when r context: {}, variables: {}, remoteResult, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: null, @@ -1084,6 +1284,7 @@ test("does not execute resolver if client field is a child of a server field whe context: {}, variables: {}, remoteResult, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: null, diff --git a/src/local-state/__tests__/LocalState/cache.test.ts b/src/local-state/__tests__/LocalState/cache.test.ts index 88c9d9abe82..ad8a8d4a319 100644 --- a/src/local-state/__tests__/LocalState/cache.test.ts +++ b/src/local-state/__tests__/LocalState/cache.test.ts @@ -7,7 +7,7 @@ import { import { LocalState } from "@apollo/client/local-state"; import { spyOnConsole } from "@apollo/client/testing/internal"; -import { gql } from "./testUtils.js"; +import { gql, WARNINGS } from "./testUtils.js"; test("can write to the cache with a mutation", async () => { const query = gql` @@ -45,6 +45,7 @@ test("can write to the cache with a mutation", async () => { context: {}, variables: {}, remoteResult: undefined, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { start: true } }); @@ -104,6 +105,7 @@ test("can write to the cache with a mutation using an ID", async () => { context: {}, variables: {}, remoteResult: undefined, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { start: true } }); @@ -174,6 +176,7 @@ test("does not overwrite __typename when writing to the cache with an id", async context: {}, variables: {}, remoteResult: undefined, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { start: true } }); @@ -214,6 +217,7 @@ test("reads from the cache on a root scalar field by default if a resolver is no context: {}, variables: {}, remoteResult: undefined, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { count: 10 } }); }); @@ -253,6 +257,7 @@ test("reads from the cache on a root object field by default if a resolver is no context: {}, variables: {}, remoteResult: undefined, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { user: { __typename: "User", id: 1, name: "Test User" } }, @@ -292,6 +297,7 @@ test("handles read functions for root scalar field from cache if resolver is not context: {}, variables: {}, remoteResult: undefined, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { count: 10 } }); }); @@ -332,13 +338,14 @@ test("handles read functions for root object field from cache if resolver is not context: {}, variables: {}, remoteResult: undefined, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { user: { __typename: "User", id: 1, name: "Test User" } }, }); }); -test("does not warn if resolver is not defined if cache does not have value", async () => { +test("warns if resolver or read function isn't defined if cache does not have value", async () => { using _ = spyOnConsole("warn"); const document = gql` query { @@ -360,10 +367,15 @@ test("does not warn if resolver is not defined if cache does not have value", as context: {}, variables: {}, remoteResult: undefined, + fetchPolicy: 
"cache-first", }) ).resolves.toStrictEqualTyped({ data: { count: null } }); - expect(console.warn).not.toHaveBeenCalled(); + expect(console.warn).toHaveBeenCalledTimes(1); + expect(console.warn).toHaveBeenCalledWith( + WARNINGS.MISSING_RESOLVER, + "Query.count" + ); }); test("reads from the cache on a nested scalar field by default if a resolver is not defined", async () => { @@ -401,6 +413,7 @@ test("reads from the cache on a nested scalar field by default if a resolver is context: {}, variables: {}, remoteResult: { data: { user: { __typename: "User", id: 1 } } }, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { user: { __typename: "User", id: 1, isLoggedIn: true } }, @@ -461,6 +474,7 @@ test("reads from the cache with a read function on a nested scalar field if a re remoteResult: { data: { user: { __typename: "User", id: 1 } }, }, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { user: { __typename: "User", id: 1, isLoggedIn: true } }, @@ -511,6 +525,7 @@ test("reads from the cache on a nested object field by default if a resolver is remoteResult: { data: { user: { __typename: "User", id: 1 } }, }, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { @@ -580,6 +595,7 @@ test("reads from the cache with a read function on a nested object field by defa remoteResult: { data: { user: { __typename: "User", id: 1 } }, }, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { @@ -647,6 +663,7 @@ test("reads from the cache on a nested client field on a non-normalized object", remoteResult: { data: { user: { __typename: "User" } }, }, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { @@ -697,6 +714,7 @@ test("does not confuse field missing resolver with root field of same name on a remoteResult: { data: { user: { __typename: "User", id: 1 } }, }, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { @@ -709,7 +727,11 @@ test("does not confuse field missing resolver with root field of same name on a }, }); - expect(console.warn).not.toHaveBeenCalled(); + expect(console.warn).toHaveBeenCalledTimes(1); + expect(console.warn).toHaveBeenCalledWith( + WARNINGS.MISSING_RESOLVER, + "User.count" + ); }); test("does not confuse field missing resolver with root field of same name on a non-normalized record", async () => { @@ -750,6 +772,7 @@ test("does not confuse field missing resolver with root field of same name on a remoteResult: { data: { user: { __typename: "User" } }, }, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { @@ -761,7 +784,11 @@ test("does not confuse field missing resolver with root field of same name on a }, }); - expect(console.warn).not.toHaveBeenCalled(); + expect(console.warn).toHaveBeenCalledTimes(1); + expect(console.warn).toHaveBeenCalledWith( + WARNINGS.MISSING_RESOLVER, + "User.count" + ); }); test("warns on undefined value if partial data is written to the cache for an object client field", async () => { @@ -814,6 +841,7 @@ test("warns on undefined value if partial data is written to the cache for an ob remoteResult: { data: { user: { __typename: "User", id: 1 } }, }, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { @@ -880,6 +908,7 @@ test("uses a written cache value from a nested client field from parent resolver context: {}, variables: {}, remoteResult: undefined, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { user: { __typename: "User", id: 1, name: "Test User" } }, diff --git 
a/src/local-state/__tests__/LocalState/context.test.ts b/src/local-state/__tests__/LocalState/context.test.ts index 4f48d68fc26..d3d870f5c9c 100644 --- a/src/local-state/__tests__/LocalState/context.test.ts +++ b/src/local-state/__tests__/LocalState/context.test.ts @@ -34,6 +34,7 @@ test("passes client in context to resolvers", async () => { context: {}, variables: {}, remoteResult: undefined, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { foo: { __typename: "Foo", bar: 1 } }, @@ -85,6 +86,7 @@ test("can access request context in resolvers", async () => { context: { id: 1 }, variables: {}, remoteResult: undefined, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { foo: { __typename: "Foo", bar: 1 } }, @@ -134,6 +136,7 @@ test("can access phase in resolver context", async () => { context: {}, variables: {}, remoteResult: undefined, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { foo: { __typename: "Foo", bar: "resolve" } }, @@ -174,6 +177,7 @@ test("can use custom context function used as request context", async () => { context: {}, variables: {}, remoteResult: undefined, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { foo: { __typename: "Foo", bar: true } }, @@ -220,6 +224,7 @@ test("context function can merge request context and custom context", async () = context: { isRequestBarEnabled: true }, variables: {}, remoteResult: undefined, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { foo: { __typename: "Foo", bar: true } }, diff --git a/src/local-state/__tests__/LocalState/errors.test.ts b/src/local-state/__tests__/LocalState/errors.test.ts index 5b4a19e4329..3cd699924ee 100644 --- a/src/local-state/__tests__/LocalState/errors.test.ts +++ b/src/local-state/__tests__/LocalState/errors.test.ts @@ -36,6 +36,7 @@ test("handles errors thrown in a resolver", async () => { context: {}, variables: {}, remoteResult: undefined, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { foo: null }, @@ -88,6 +89,7 @@ test("handles errors thrown in a child resolver", async () => { context: {}, variables: {}, remoteResult: undefined, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { foo: { __typename: "Foo", bar: null } }, @@ -146,6 +148,7 @@ test("adds errors for each field that throws errors", async () => { context: {}, variables: {}, remoteResult: undefined, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { foo: { __typename: "Foo", bar: null, baz: null, qux: true } }, @@ -208,6 +211,7 @@ test("handles errors thrown in a child resolver from parent array", async () => context: {}, variables: {}, remoteResult: undefined, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { @@ -283,6 +287,7 @@ test("handles errors thrown in a child resolver for an array from a single item" context: {}, variables: {}, remoteResult: undefined, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { @@ -339,6 +344,7 @@ test("serializes a thrown GraphQLError and merges extensions", async () => { context: {}, variables: {}, remoteResult: undefined, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { foo: null }, @@ -393,6 +399,7 @@ test("overwrites localState extension from thrown GraphQLError if provided", asy context: {}, variables: {}, remoteResult: undefined, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { foo: null }, @@ -452,6 +459,7 @@ test("concatenates 
client errors with server errors", async () => { context: {}, variables: {}, remoteResult, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { foo: null, baz: { __typename: "Baz", qux: null } }, @@ -502,6 +510,7 @@ test("handles errors thrown in async resolvers", async () => { context: {}, variables: {}, remoteResult: undefined, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { foo: null }, @@ -551,6 +560,7 @@ test("handles rejected promises returned in async resolvers", async () => { context: {}, variables: {}, remoteResult: undefined, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { foo: null }, @@ -614,6 +624,7 @@ test("handles errors thrown for resolvers on fields inside fragments", async () context: {}, variables: {}, remoteResult, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { @@ -671,6 +682,7 @@ test("handles remote errors with no local resolver errors", async () => { context: {}, variables: {}, remoteResult, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { diff --git a/src/local-state/__tests__/LocalState/forcedResolvers.test.ts b/src/local-state/__tests__/LocalState/forcedResolvers.test.ts index eb75a0845a2..68a4f3c61f5 100644 --- a/src/local-state/__tests__/LocalState/forcedResolvers.test.ts +++ b/src/local-state/__tests__/LocalState/forcedResolvers.test.ts @@ -40,6 +40,7 @@ test("runs resolvers marked with @client(always: true)", async () => { context: {}, variables: {}, remoteResult: { data: client.readQuery({ query: document }) }, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { @@ -65,6 +66,7 @@ test("runs resolvers marked with @client(always: true)", async () => { context: {}, variables: {}, remoteResult: { data: client.readQuery({ query: document }) }, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { @@ -120,6 +122,7 @@ test("only runs forced resolvers for fields marked with `@client(always: true)`, variables: {}, remoteResult: undefined, onlyRunForcedResolvers: true, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { name: "John Smith", isLoggedIn: true }, @@ -185,6 +188,7 @@ test("runs nested forced resolvers from non-forced client descendant field", asy variables: {}, remoteResult: undefined, onlyRunForcedResolvers: true, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { @@ -238,6 +242,7 @@ test("warns for client fields without cached data and resolvers when running for variables: {}, remoteResult: { data: { user: { __typename: "User", id: 1 } } }, onlyRunForcedResolvers: true, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ // Note: name is null because we are only running forced resolvers and diff --git a/src/local-state/__tests__/LocalState/fragments.test.ts b/src/local-state/__tests__/LocalState/fragments.test.ts index 892fd371fc5..df7e31482ac 100644 --- a/src/local-state/__tests__/LocalState/fragments.test.ts +++ b/src/local-state/__tests__/LocalState/fragments.test.ts @@ -52,6 +52,7 @@ test("handles @client fields inside fragments", async () => { context: {}, variables: {}, remoteResult, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { @@ -100,6 +101,7 @@ test("handles a mix of @client fields with fragments and server fields", async ( context: {}, variables: {}, remoteResult, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { @@ -154,6 +156,7 @@ it("matches fragments with fragment conditions", 
async () => { context: {}, variables: {}, remoteResult, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { @@ -200,6 +203,7 @@ test("throws when cache does not implement fragmentMatches", async () => { context: {}, variables: {}, remoteResult: undefined, + fetchPolicy: "cache-first", }) ).rejects.toEqual( new InvariantError( @@ -240,6 +244,7 @@ test("does not traverse fragment when fragment spread type condition does not ma context: {}, variables: {}, remoteResult: undefined, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { foo: { __typename: "Foo" } } }); }); @@ -277,6 +282,7 @@ test("can use a fragments on interface types defined by possibleTypes", async () context: {}, variables: {}, remoteResult: undefined, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { diff --git a/src/local-state/__tests__/LocalState/partialData.test.ts b/src/local-state/__tests__/LocalState/partialData.test.ts index a6eb3bf0f29..397f4862dbb 100644 --- a/src/local-state/__tests__/LocalState/partialData.test.ts +++ b/src/local-state/__tests__/LocalState/partialData.test.ts @@ -30,6 +30,7 @@ test("omits field and does not warn if resolver not defined when returnPartialDa variables: {}, remoteResult: { data: { user: { __typename: "User", id: 1 } } }, returnPartialData: true, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { user: { __typename: "User", id: 1 } }, @@ -81,6 +82,7 @@ test("omits client fields without cached values when running forced resolvers wi remoteResult: { data: { user: { __typename: "User", id: 1 } } }, returnPartialData: true, onlyRunForcedResolvers: true, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ // Note: name is omitted because we are only running forced resolvers and diff --git a/src/local-state/__tests__/LocalState/rootValue.test.ts b/src/local-state/__tests__/LocalState/rootValue.test.ts index ba0013dd584..fb0b2f9ebc4 100644 --- a/src/local-state/__tests__/LocalState/rootValue.test.ts +++ b/src/local-state/__tests__/LocalState/rootValue.test.ts @@ -37,6 +37,7 @@ test("passes parent value as empty object to root resolver for client-only query context: {}, variables: {}, remoteResult: undefined, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { foo: { __typename: "Foo", bar: true } }, @@ -79,6 +80,7 @@ test("passes rootValue as remote result to root resolver when server fields are context: {}, variables: {}, remoteResult: { data: { bar: { __typename: "Bar", baz: true } } }, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { diff --git a/src/local-state/__tests__/LocalState/subscriptions.test.ts b/src/local-state/__tests__/LocalState/subscriptions.test.ts index 81660e35b8b..30df1bf6b3b 100644 --- a/src/local-state/__tests__/LocalState/subscriptions.test.ts +++ b/src/local-state/__tests__/LocalState/subscriptions.test.ts @@ -26,6 +26,7 @@ test("throws when given a subscription with no client fields", async () => { context: {}, variables: {}, remoteResult: { data: { field: 1 } }, + fetchPolicy: "cache-first", }) ).rejects.toEqual( new InvariantError("Expected document to contain `@client` fields.") @@ -64,6 +65,7 @@ test("adds @client fields with subscription results", async () => { context: {}, variables: {}, remoteResult: { data: { field: 1 } }, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { field: 1, count: 1 }, @@ -76,6 +78,7 @@ test("adds @client fields with subscription results", async () => { context: {}, 
variables: {}, remoteResult: { data: { field: 2 } }, + fetchPolicy: "cache-first", }) ).resolves.toStrictEqualTyped({ data: { field: 2, count: 2 }, diff --git a/src/local-state/__tests__/LocalState/testUtils.ts b/src/local-state/__tests__/LocalState/testUtils.ts index 4e1c07fa21e..325bbdfcc27 100644 --- a/src/local-state/__tests__/LocalState/testUtils.ts +++ b/src/local-state/__tests__/LocalState/testUtils.ts @@ -4,3 +4,10 @@ import { addTypenameToDocument } from "@apollo/client/utilities"; export const gql = (...args: Parameters) => addTypenameToDocument(origGql(...args)); + +export const WARNINGS = { + MISSING_RESOLVER: + "Could not find a resolver for the '%s' field nor does the cache resolve the field. The field value has been set to `null`. Either define a resolver for the field or ensure the cache can resolve the value, for example, by adding a 'read' function to a field policy in 'InMemoryCache'.", + NO_CACHE: + "The '%s' field resolves the value from the cache, for example from a 'read' function, but a 'no-cache' fetch policy was used. The field value has been set to `null`. Either define a local resolver or use a fetch policy that uses the cache to ensure the field is resolved correctly.", +}; From c5d5630eb2aa1ec1d8a0cf16df46da82d9403a6c Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Fri, 26 Sep 2025 09:06:10 -0600 Subject: [PATCH 247/254] Version Packages (alpha) (#12940) Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- .changeset/pre.json | 7 ++++++- CHANGELOG.md | 20 ++++++++++++++++++++ package-lock.json | 4 ++-- package.json | 2 +- 4 files changed, 29 insertions(+), 4 deletions(-) diff --git a/.changeset/pre.json b/.changeset/pre.json index 562f7477ac3..86eb5c1618e 100644 --- a/.changeset/pre.json +++ b/.changeset/pre.json @@ -8,9 +8,14 @@ }, "changesets": [ "cold-kiwis-give", + "flat-worms-notice", "funny-bats-hammer", "little-yaks-decide", "neat-lemons-shave", - "six-islands-drum" + "olive-queens-fold", + "perfect-crabs-smile", + "shaggy-islands-yell", + "six-islands-drum", + "unlucky-cooks-rhyme" ] } diff --git a/CHANGELOG.md b/CHANGELOG.md index 70afc0590a8..98b1810007c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,25 @@ # @apollo/client +## 4.1.0-alpha.1 + +### Minor Changes + +- [#12934](https://github.com/apollographql/apollo-client/pull/12934) [`54ab6d9`](https://github.com/apollographql/apollo-client/commit/54ab6d994692dad9f06d3d0b84c84d021d126577) Thanks [@jerelmiller](https://github.com/jerelmiller)! - Don't set the fallback value of a `@client` field to `null` when a `read` function is defined. Instead the `read` function will be called with an `existing` value of `undefined` to allow default arguments to be used to set the returned value. + + When a `read` function is not defined nor is there a defined resolver for the field, warn and set the value to `null` only in that instance. + +- [#12934](https://github.com/apollographql/apollo-client/pull/12934) [`54ab6d9`](https://github.com/apollographql/apollo-client/commit/54ab6d994692dad9f06d3d0b84c84d021d126577) Thanks [@jerelmiller](https://github.com/jerelmiller)! - Add an abstract `resolvesClientField` function to `ApolloCache` that can be used by caches to tell `LocalState` if it can resolve a `@client` field when a local resolver is not defined. 
+ + `LocalState` will emit a warning and set a fallback value of `null` when no local resolver is defined and `resolvesClientField` returns `false`, or isn't defined. Returning `true` from `resolvesClientField` signals that a mechanism in the cache will set the field value. In this case, `LocalState` won't set the field value. + +### Patch Changes + +- [#12915](https://github.com/apollographql/apollo-client/pull/12915) [`c97b145`](https://github.com/apollographql/apollo-client/commit/c97b145188d39d754ff098ff399a80cae5b10cc0) Thanks [@phryneas](https://github.com/phryneas)! - Create mechanism to add experimental features to Apollo Client + +- [#12934](https://github.com/apollographql/apollo-client/pull/12934) [`54ab6d9`](https://github.com/apollographql/apollo-client/commit/54ab6d994692dad9f06d3d0b84c84d021d126577) Thanks [@jerelmiller](https://github.com/jerelmiller)! - Ensure `LocalState` doesn't try to read from the cache when using a `no-cache` fetch policy. + +- [#12934](https://github.com/apollographql/apollo-client/pull/12934) [`54ab6d9`](https://github.com/apollographql/apollo-client/commit/54ab6d994692dad9f06d3d0b84c84d021d126577) Thanks [@jerelmiller](https://github.com/jerelmiller)! - Warn when using a `no-cache` fetch policy without a local resolver defined. `no-cache` queries do not read or write to the cache which meant `no-cache` queries are silently incomplete when the `@client` field value was handled by a cache `read` function. + ## 4.1.0-alpha.0 ### Minor Changes diff --git a/package-lock.json b/package-lock.json index 02d8b0bd9b3..56e37acae52 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@apollo/client", - "version": "4.1.0-alpha.0", + "version": "4.1.0-alpha.1", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "@apollo/client", - "version": "4.1.0-alpha.0", + "version": "4.1.0-alpha.1", "hasInstallScript": true, "license": "MIT", "workspaces": [ diff --git a/package.json b/package.json index d8d7ef315d7..77ba06d4cc4 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@apollo/client", - "version": "4.1.0-alpha.0", + "version": "4.1.0-alpha.1", "description": "A fully-featured caching GraphQL client.", "private": true, "keywords": [ From 1c82eafe4921a9e30128202623be6c5a3d4df803 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Fri, 10 Oct 2025 09:26:45 -0600 Subject: [PATCH 248/254] Throw an error when using `@stream` without a configured `incrementalDelivery` handler (#12954) --- .changeset/popular-files-glow.md | 5 +++++ .size-limits.json | 8 ++++---- src/__tests__/ApolloClient.ts | 23 +++++++++++++++++++++- src/incremental/handlers/notImplemented.ts | 4 ++-- 4 files changed, 33 insertions(+), 7 deletions(-) create mode 100644 .changeset/popular-files-glow.md diff --git a/.changeset/popular-files-glow.md b/.changeset/popular-files-glow.md new file mode 100644 index 00000000000..53955edd598 --- /dev/null +++ b/.changeset/popular-files-glow.md @@ -0,0 +1,5 @@ +--- +"@apollo/client": patch +--- + +Ensure an error is thrown when `@stream` is detected and an `incrementalDelivery` handler is not configured. 
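A minimal sketch of the failure mode this patch guards against (mirroring the test added to `src/__tests__/ApolloClient.ts` below; the `items` and `bar` fields are illustrative): with no incremental handler configured, a query that uses `@stream` now rejects up front instead of having the directive silently ignored.

```ts
import { ApolloClient, ApolloLink, gql, InMemoryCache } from "@apollo/client";

const client = new ApolloClient({
  cache: new InMemoryCache(),
  link: ApolloLink.empty(),
});

const query = gql`
  query {
    items @stream {
      bar
    }
  }
`;

// Rejects with an InvariantError: "`@defer` and `@stream` are not supported
// without specifying an incremental handler. Please pass a handler as the
// `incrementalHandler` option to the `ApolloClient` constructor."
client.query({ query }).catch((error) => console.error(error.message));
```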
diff --git a/.size-limits.json b/.size-limits.json index 7bfe2604146..e214463c49d 100644 --- a/.size-limits.json +++ b/.size-limits.json @@ -1,6 +1,6 @@ { - "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (CJS)": 44753, - "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production) (CJS)": 39420, - "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\"": 33901, - "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production)": 27727 + "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (CJS)": 44752, + "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production) (CJS)": 39500, + "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\"": 33897, + "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production)": 27749 } diff --git a/src/__tests__/ApolloClient.ts b/src/__tests__/ApolloClient.ts index b491ae75741..a0381c9daef 100644 --- a/src/__tests__/ApolloClient.ts +++ b/src/__tests__/ApolloClient.ts @@ -3052,7 +3052,28 @@ describe("ApolloClient", () => { await expect(() => client.query({ query })).rejects.toThrow( new InvariantError( - "`@defer` is not supported without specifying an incremental handler. Please pass a handler as the `incrementalHandler` option to the `ApolloClient` constructor." + "`@defer` and `@stream` are not supported without specifying an incremental handler. Please pass a handler as the `incrementalHandler` option to the `ApolloClient` constructor." + ) + ); + }); + + test("will error when used with `@stream` in a without specifying an incremental strategy", async () => { + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: ApolloLink.empty(), + }); + + const query = gql` + query { + items @stream { + bar + } + } + `; + + await expect(() => client.query({ query })).rejects.toThrow( + new InvariantError( + "`@defer` and `@stream` are not supported without specifying an incremental handler. Please pass a handler as the `incrementalHandler` option to the `ApolloClient` constructor." ) ); }); diff --git a/src/incremental/handlers/notImplemented.ts b/src/incremental/handlers/notImplemented.ts index f4c02545c57..ce7f287ac00 100644 --- a/src/incremental/handlers/notImplemented.ts +++ b/src/incremental/handlers/notImplemented.ts @@ -22,8 +22,8 @@ export class NotImplementedHandler implements Incremental.Handler { } prepareRequest(request: ApolloLink.Request) { invariant( - !hasDirectives(["defer"], request.query), - "`@defer` is not supported without specifying an incremental handler. Please pass a handler as the `incrementalHandler` option to the `ApolloClient` constructor." + !hasDirectives(["defer", "stream"], request.query), + "`@defer` and `@stream` are not supported without specifying an incremental handler. Please pass a handler as the `incrementalHandler` option to the `ApolloClient` constructor." 
); return request; From 556e83781069d925a7e8f99e49023f6f858c6438 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Fri, 10 Oct 2025 09:41:15 -0600 Subject: [PATCH 249/254] Add `context` as callback to `useMutation` `mutate` function (#12959) --- .api-reports/api-report-react.api.md | 4 +- .changeset/big-flowers-move.md | 23 + docs/source/data/mutations.mdx | 25 + .../__tests__/useMutation/context.test.tsx | 434 ++++++++++++++++++ src/react/hooks/useMutation.ts | 22 +- 5 files changed, 505 insertions(+), 3 deletions(-) create mode 100644 .changeset/big-flowers-move.md create mode 100644 src/react/hooks/__tests__/useMutation/context.test.tsx diff --git a/.api-reports/api-report-react.api.md b/.api-reports/api-report-react.api.md index 213a6882526..9861b908b08 100644 --- a/.api-reports/api-report-react.api.md +++ b/.api-reports/api-report-react.api.md @@ -639,7 +639,9 @@ export namespace useMutation { } ]) => Promise>>; // (undocumented) - export type MutationFunctionOptions = Options; + export type MutationFunctionOptions = Options & { + context?: DefaultContext | ((hookContext: DefaultContext | undefined) => DefaultContext); + }; // (undocumented) export interface Options = Partial> { awaitRefetchQueries?: boolean; diff --git a/.changeset/big-flowers-move.md b/.changeset/big-flowers-move.md new file mode 100644 index 00000000000..c80ffc762f5 --- /dev/null +++ b/.changeset/big-flowers-move.md @@ -0,0 +1,23 @@ +--- +"@apollo/client": minor +--- + +You can now provide a callback function as the `context` option on the `mutate` function returned by `useMutation`. The callback function is called with the value of the `context` option provided to the `useMutation` hook. This is useful if you'd like to merge the context object provided to the `useMutation` hook with a value provided to the `mutate` function. + + +```ts +function MyComponent() { + const [mutate, result] = useMutation(MUTATION, { + context: { foo: true } + }); + + async function runMutation() { + await mutate({ + // sends context as { foo: true, bar: true } + context: (hookContext) => ({ ...hookContext, bar: true }) + }); + } + + // ... +} +``` diff --git a/docs/source/data/mutations.mdx b/docs/source/data/mutations.mdx index f12a780c3b2..15fe52b449e 100644 --- a/docs/source/data/mutations.mdx +++ b/docs/source/data/mutations.mdx @@ -156,6 +156,31 @@ When using TypeScript, you might see an error related to a missing variable when + + +##### Merging `context` from the hook and `mutate` function + + + +Due to option precedence, `context` provided to the `mutate` function overrides `context` provided to the `useMutation` hook. In some cases, you might want to merge the `context` value provided to the hook with a value available at the time you execute the `mutate` function. + +You accomplish this by using a callback function for the `context` option provided to the `mutate` function. The callback function is called with the `context` value provided to the hook, allowing you to merge them together. + +```ts +addTodo({ + context: (hookContext) => ({ + ...hookContext, + myCustomValue: true, + }), +}); +``` + + + +Your callback function is not required to merge the context values together. The `context` value sent to the link chain is the value returned from the function which makes it possible to change the `context` value in any way you wish, such as omitting a property from the hook context. 
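To make the non-merging behavior concrete, a short sketch (the `legacyHeader` property is hypothetical): because only the callback's return value reaches the link chain, returning a subset of the hook context drops the omitted properties.

```ts
addTodo({
  // Only the returned object is sent to the link chain; the hypothetical
  // `legacyHeader` property from the hook context is intentionally dropped.
  context: ({ legacyHeader, ...hookContext } = {}) => hookContext,
});
```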
+ + + ### Tracking mutation status In addition to a mutate function, the `useMutation` hook returns an object that represents the current state of the mutation's execution. The fields of this object include booleans that indicate whether the mutate function has been `called` and whether the mutation's result is currently `loading`. diff --git a/src/react/hooks/__tests__/useMutation/context.test.tsx b/src/react/hooks/__tests__/useMutation/context.test.tsx new file mode 100644 index 00000000000..aae00dd8693 --- /dev/null +++ b/src/react/hooks/__tests__/useMutation/context.test.tsx @@ -0,0 +1,434 @@ +import { + disableActEnvironment, + renderHookToSnapshotStream, +} from "@testing-library/react-render-stream"; +import React from "react"; +import { delay, of } from "rxjs"; + +import { ApolloClient, ApolloLink, gql, InMemoryCache } from "@apollo/client"; +import { ApolloProvider, useMutation } from "@apollo/client/react"; + +const echoContextLink = new ApolloLink((operation) => { + // filter out internal client set context values + const { queryDeduplication, optimisticResponse, ...context } = + operation.getContext(); + return of({ + data: { echo: { context } }, + }).pipe(delay(20)); +}); + +test("context is provided from hook", async () => { + const mutation = gql` + mutation { + echo { + context + } + } + `; + + const client = new ApolloClient({ + link: echoContextLink, + cache: new InMemoryCache(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot, getCurrentSnapshot } = await renderHookToSnapshotStream( + () => useMutation(mutation, { context: { foo: true } }), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + { + const [, result] = await takeSnapshot(); + + expect(result).toStrictEqualTyped({ + data: undefined, + error: undefined, + loading: false, + called: false, + }); + } + + const [execute] = getCurrentSnapshot(); + + await execute(); + + { + const [, result] = await takeSnapshot(); + + expect(result).toStrictEqualTyped({ + data: undefined, + error: undefined, + loading: true, + called: true, + }); + } + + { + const [, result] = await takeSnapshot(); + + expect(result).toStrictEqualTyped({ + data: { echo: { context: { foo: true } } }, + error: undefined, + loading: false, + called: true, + }); + } + + await expect(takeSnapshot).not.toRerender(); +}); + +test("context provided to execute function overrides hook context", async () => { + const mutation = gql` + mutation { + echo { + context + } + } + `; + + const client = new ApolloClient({ + link: echoContextLink, + cache: new InMemoryCache(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot, getCurrentSnapshot } = await renderHookToSnapshotStream( + () => useMutation(mutation, { context: { foo: true } }), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + { + const [, result] = await takeSnapshot(); + + expect(result).toStrictEqualTyped({ + data: undefined, + error: undefined, + loading: false, + called: false, + }); + } + + const [execute] = getCurrentSnapshot(); + + await execute({ context: { bar: true } }); + + { + const [, result] = await takeSnapshot(); + + expect(result).toStrictEqualTyped({ + data: undefined, + error: undefined, + loading: true, + called: true, + }); + } + + { + const [, result] = await takeSnapshot(); + + expect(result).toStrictEqualTyped({ + data: { echo: { context: { bar: true } } }, + error: undefined, + loading: false, + called: true, + }); + } + + await expect(takeSnapshot).not.toRerender(); +}); + +test("allows 
context as callback called with context from hook", async () => { + const mutation = gql` + mutation { + echo { + context + } + } + `; + + const client = new ApolloClient({ + link: echoContextLink, + cache: new InMemoryCache(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot, getCurrentSnapshot } = await renderHookToSnapshotStream( + () => useMutation(mutation, { context: { foo: true } }), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + { + const [, result] = await takeSnapshot(); + + expect(result).toStrictEqualTyped({ + data: undefined, + error: undefined, + loading: false, + called: false, + }); + } + + const [execute] = getCurrentSnapshot(); + + const contextFn = jest.fn((ctx) => ({ ...ctx, bar: true })); + await execute({ context: contextFn }); + + expect(contextFn).toHaveBeenCalledTimes(1); + expect(contextFn).toHaveBeenCalledWith({ foo: true }); + + { + const [, result] = await takeSnapshot(); + + expect(result).toStrictEqualTyped({ + data: undefined, + error: undefined, + loading: true, + called: true, + }); + } + + { + const [, result] = await takeSnapshot(); + + expect(result).toStrictEqualTyped({ + data: { + echo: { context: { foo: true, bar: true } }, + }, + error: undefined, + loading: false, + called: true, + }); + } + + await expect(takeSnapshot).not.toRerender(); +}); + +test("provides undefined to context callback if context is not provided to hook", async () => { + const mutation = gql` + mutation { + echo { + context + } + } + `; + + const client = new ApolloClient({ + link: echoContextLink, + cache: new InMemoryCache(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot, getCurrentSnapshot } = await renderHookToSnapshotStream( + () => useMutation(mutation), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + { + const [, result] = await takeSnapshot(); + + expect(result).toStrictEqualTyped({ + data: undefined, + error: undefined, + loading: false, + called: false, + }); + } + + const [execute] = getCurrentSnapshot(); + + const contextFn = jest.fn((ctx) => ({ ...ctx, bar: true })); + await execute({ context: contextFn }); + + expect(contextFn).toHaveBeenCalledTimes(1); + expect(contextFn).toHaveBeenCalledWith(undefined); + + { + const [, result] = await takeSnapshot(); + + expect(result).toStrictEqualTyped({ + data: undefined, + error: undefined, + loading: true, + called: true, + }); + } + + { + const [, result] = await takeSnapshot(); + + expect(result).toStrictEqualTyped({ + data: { + echo: { context: { bar: true } }, + }, + error: undefined, + loading: false, + called: true, + }); + } + + await expect(takeSnapshot).not.toRerender(); +}); + +test("does not merge returned context from context callback with hook", async () => { + const mutation = gql` + mutation { + echo { + context + } + } + `; + + const client = new ApolloClient({ + link: echoContextLink, + cache: new InMemoryCache(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot, getCurrentSnapshot } = await renderHookToSnapshotStream( + () => useMutation(mutation, { context: { foo: true } }), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + { + const [, result] = await takeSnapshot(); + + expect(result).toStrictEqualTyped({ + data: undefined, + error: undefined, + loading: false, + called: false, + }); + } + + const [execute] = getCurrentSnapshot(); + + const contextFn = jest.fn(() => ({ baz: true })); + await execute({ context: contextFn }); + + 
expect(contextFn).toHaveBeenCalledTimes(1); + expect(contextFn).toHaveBeenCalledWith({ foo: true }); + + { + const [, result] = await takeSnapshot(); + + expect(result).toStrictEqualTyped({ + data: undefined, + error: undefined, + loading: true, + called: true, + }); + } + + { + const [, result] = await takeSnapshot(); + + expect(result).toStrictEqualTyped({ + data: { + echo: { context: { baz: true } }, + }, + error: undefined, + loading: false, + called: true, + }); + } + + await expect(takeSnapshot).not.toRerender(); +}); + +test("provides full context returned from callback to update function", async () => { + const mutation = gql` + mutation { + echo { + context + } + } + `; + + const client = new ApolloClient({ + link: echoContextLink, + cache: new InMemoryCache(), + }); + + using _disabledAct = disableActEnvironment(); + const update = jest.fn(); + const { takeSnapshot, getCurrentSnapshot } = await renderHookToSnapshotStream( + () => useMutation(mutation, { context: { foo: true }, update }), + { + wrapper: ({ children }) => ( + {children} + ), + } + ); + + { + const [, result] = await takeSnapshot(); + + expect(result).toStrictEqualTyped({ + data: undefined, + error: undefined, + loading: false, + called: false, + }); + } + + const [execute] = getCurrentSnapshot(); + + await execute({ context: (ctx) => ({ ...ctx, bar: true }) }); + + { + const [, result] = await takeSnapshot(); + + expect(result).toStrictEqualTyped({ + data: undefined, + error: undefined, + loading: true, + called: true, + }); + } + + { + const [, result] = await takeSnapshot(); + + expect(result).toStrictEqualTyped({ + data: { + echo: { context: { foo: true, bar: true } }, + }, + error: undefined, + loading: false, + called: true, + }); + } + + await expect(takeSnapshot).not.toRerender(); + + expect(update).toHaveBeenCalledTimes(1); + expect(update).toHaveBeenCalledWith( + client.cache, + { + data: { + echo: { context: { foo: true, bar: true } }, + }, + }, + { context: { foo: true, bar: true }, variables: {} } + ); +}); diff --git a/src/react/hooks/useMutation.ts b/src/react/hooks/useMutation.ts index e7378595202..14c23ec398f 100644 --- a/src/react/hooks/useMutation.ts +++ b/src/react/hooks/useMutation.ts @@ -160,7 +160,18 @@ export declare namespace useMutation { TData = unknown, TVariables extends OperationVariables = OperationVariables, TCache extends ApolloCache = ApolloCache, - > = Options; + > = Options & { + /** + * {@inheritDoc @apollo/client!MutationOptionsDocumentation#context:member} + * + * @remarks + * When provided as a callback function, the function is called with the + * value of `context` provided to the `useMutation` hook. + */ + context?: + | DefaultContext + | ((hookContext: DefaultContext | undefined) => DefaultContext); + }; export namespace DocumentationTypes { /** {@inheritDoc @apollo/client/react!useMutation:function(1)} */ @@ -271,6 +282,10 @@ export function useMutation< const { options, mutation } = ref.current; const baseOptions = { ...options, mutation }; const client = executeOptions.client || ref.current.client; + const context = + typeof executeOptions.context === "function" ? 
+ executeOptions.context(options?.context) + : executeOptions.context; if (!ref.current.result.loading && ref.current.isMounted) { setResult( @@ -285,7 +300,10 @@ export function useMutation< } const mutationId = ++ref.current.mutationId; - const clientOptions = mergeOptions(baseOptions, executeOptions as any); + const clientOptions = mergeOptions(baseOptions, { + ...executeOptions, + context, + } as any); return preventUnhandledRejection( client From f8dab9d6d99120640515b68bbaba06b839e51941 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Fri, 10 Oct 2025 09:50:03 -0600 Subject: [PATCH 250/254] Version Packages (alpha) (#12962) Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- .changeset/pre.json | 2 ++ CHANGELOG.md | 27 +++++++++++++++++++++++++++ package-lock.json | 4 ++-- package.json | 2 +- 4 files changed, 32 insertions(+), 3 deletions(-) diff --git a/.changeset/pre.json b/.changeset/pre.json index 86eb5c1618e..73dac31dce9 100644 --- a/.changeset/pre.json +++ b/.changeset/pre.json @@ -7,6 +7,7 @@ "@apollo/client-codemod-migrate-3-to-4": "1.0.2" }, "changesets": [ + "big-flowers-move", "cold-kiwis-give", "flat-worms-notice", "funny-bats-hammer", @@ -14,6 +15,7 @@ "neat-lemons-shave", "olive-queens-fold", "perfect-crabs-smile", + "popular-files-glow", "shaggy-islands-yell", "six-islands-drum", "unlucky-cooks-rhyme" diff --git a/CHANGELOG.md b/CHANGELOG.md index 98b1810007c..e056190bf04 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,32 @@ # @apollo/client +## 4.1.0-alpha.2 + +### Minor Changes + +- [#12959](https://github.com/apollographql/apollo-client/pull/12959) [`556e837`](https://github.com/apollographql/apollo-client/commit/556e83781069d925a7e8f99e49023f6f858c6438) Thanks [@jerelmiller](https://github.com/jerelmiller)! - You can now provide a callback function as the `context` option on the `mutate` function returned by `useMutation`. The callback function is called with the value of the `context` option provided to the `useMutation` hook. This is useful if you'd like to merge the context object provided to the `useMutation` hook with a value provided to the `mutate` function. + + ```ts + function MyComponent() { + const [mutate, result] = useMutation(MUTATION, { + context: { foo: true }, + }); + + async function runMutation() { + await mutate({ + // sends context as { foo: true, bar: true } + context: (hookContext) => ({ ...hookContext, bar: true }), + }); + } + + // ... + } + ``` + +### Patch Changes + +- [#12954](https://github.com/apollographql/apollo-client/pull/12954) [`1c82eaf`](https://github.com/apollographql/apollo-client/commit/1c82eafe4921a9e30128202623be6c5a3d4df803) Thanks [@jerelmiller](https://github.com/jerelmiller)! - Ensure an error is thrown when `@stream` is detected and an `incrementalDelivery` handler is not configured. 
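As a hedged illustration of the configuration that patch note refers to — a minimal sketch, assuming the handler classes are exported from `@apollo/client/incremental` (the tests later in this series construct them the same way):

```ts
import { ApolloClient, HttpLink, InMemoryCache } from "@apollo/client";
// Assumed entry point for the incremental-delivery handlers.
import { GraphQL17Alpha9Handler } from "@apollo/client/incremental";

// With no incremental handler configured, a query that contains `@stream`
// now surfaces an error instead of being silently mishandled.
const client = new ApolloClient({
  link: new HttpLink({ uri: "/graphql" }),
  cache: new InMemoryCache(),
  incrementalHandler: new GraphQL17Alpha9Handler(),
});
```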
+ ## 4.1.0-alpha.1 ### Minor Changes diff --git a/package-lock.json b/package-lock.json index 56e37acae52..5ca228932a7 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@apollo/client", - "version": "4.1.0-alpha.1", + "version": "4.1.0-alpha.2", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "@apollo/client", - "version": "4.1.0-alpha.1", + "version": "4.1.0-alpha.2", "hasInstallScript": true, "license": "MIT", "workspaces": [ diff --git a/package.json b/package.json index 77ba06d4cc4..37d59341ada 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@apollo/client", - "version": "4.1.0-alpha.1", + "version": "4.1.0-alpha.2", "description": "A fully-featured caching GraphQL client.", "private": true, "keywords": [ From 072da24a8daec3a646ef0cce30de32f95ea0bb23 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Mon, 20 Oct 2025 18:42:38 -0600 Subject: [PATCH 251/254] Update `accept` header used with `GraphQL17Alpha9Handler` (#12973) Updates the header based on changes in https://github.com/apollographql/apollo-server/pull/8148 --- .changeset/slimy-ducks-scream.md | 5 +++++ .size-limits.json | 8 ++++---- src/incremental/handlers/graphql17Alpha9.ts | 6 +++++- src/link/http/__tests__/HttpLink.ts | 4 ++-- 4 files changed, 16 insertions(+), 7 deletions(-) create mode 100644 .changeset/slimy-ducks-scream.md diff --git a/.changeset/slimy-ducks-scream.md b/.changeset/slimy-ducks-scream.md new file mode 100644 index 00000000000..66187335a80 --- /dev/null +++ b/.changeset/slimy-ducks-scream.md @@ -0,0 +1,5 @@ +--- +"@apollo/client": patch +--- + +Update the `accept` header used with the `GraphQL17Alpha9Handler` to `multipart/mixed;incrementalSpec=v0.2` to ensure the newest incremental delivery format is requested. diff --git a/.size-limits.json b/.size-limits.json index e214463c49d..6671f69ef70 100644 --- a/.size-limits.json +++ b/.size-limits.json @@ -1,6 +1,6 @@ { - "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (CJS)": 44752, - "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production) (CJS)": 39500, - "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\"": 33897, - "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production)": 27749 + "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (CJS)": 44831, + "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production) (CJS)": 39452, + "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\"": 33875, + "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production)": 27756 } diff --git a/src/incremental/handlers/graphql17Alpha9.ts b/src/incremental/handlers/graphql17Alpha9.ts index 51ea4acc714..ee671c18504 100644 --- a/src/incremental/handlers/graphql17Alpha9.ts +++ b/src/incremental/handlers/graphql17Alpha9.ts @@ -259,7 +259,11 @@ export class GraphQL17Alpha9Handler if (hasDirectives(["defer", "stream"], request.query)) { const context = request.context ?? 
{}; const http = (context.http ??= {}); - http.accept = ["multipart/mixed", ...(http.accept || [])]; + // https://specs.apollo.dev/incremental/v0.2/ + http.accept = [ + "multipart/mixed;incrementalSpec=v0.2", + ...(http.accept || []), + ]; request.context = context; } diff --git a/src/link/http/__tests__/HttpLink.ts b/src/link/http/__tests__/HttpLink.ts index 69792fab1c3..53586aeb351 100644 --- a/src/link/http/__tests__/HttpLink.ts +++ b/src/link/http/__tests__/HttpLink.ts @@ -1636,7 +1636,7 @@ describe("HttpLink", () => { headers: { "content-type": "application/json", accept: - "multipart/mixed,application/graphql-response+json,application/json;q=0.9", + "multipart/mixed;incrementalSpec=v0.2,application/graphql-response+json,application/json;q=0.9", }, }) ); @@ -1744,7 +1744,7 @@ describe("HttpLink", () => { headers: { "content-type": "application/json", accept: - "multipart/mixed,application/graphql-response+json,application/json;q=0.9", + "multipart/mixed;incrementalSpec=v0.2,application/graphql-response+json,application/json;q=0.9", }, }) ); From 5c56b3210a2c03e247ec9e600f1e27eb71df5e96 Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Mon, 27 Oct 2025 09:15:49 -0600 Subject: [PATCH 252/254] Ignore `data` values set in subsequent chunks in incremental responses (#12982) --- .api-reports/api-report-incremental.api.md | 2 - .changeset/neat-windows-compete.md | 5 ++ .changeset/shaggy-brooms-talk.md | 5 ++ .size-limits.json | 4 +- .../__tests__/defer20220824/defer.test.ts | 75 ++++++++++++++++++ .../__tests__/graphql17Alpha9/defer.test.ts | 76 +++++++++++++++++++ src/incremental/handlers/defer20220824.ts | 9 ++- src/incremental/handlers/graphql17Alpha9.ts | 6 +- src/link/context/__tests__/index.ts | 8 +- 9 files changed, 177 insertions(+), 13 deletions(-) create mode 100644 .changeset/neat-windows-compete.md create mode 100644 .changeset/shaggy-brooms-talk.md diff --git a/.api-reports/api-report-incremental.api.md b/.api-reports/api-report-incremental.api.md index 523c2a8b193..911a8314481 100644 --- a/.api-reports/api-report-incremental.api.md +++ b/.api-reports/api-report-incremental.api.md @@ -52,8 +52,6 @@ namespace Defer20220824Handler { }; // (undocumented) type SubsequentResult> = { - data?: TData | null | undefined; - errors?: ReadonlyArray; extensions?: Record; hasNext: boolean; incremental?: Array>; diff --git a/.changeset/neat-windows-compete.md b/.changeset/neat-windows-compete.md new file mode 100644 index 00000000000..ae595909bad --- /dev/null +++ b/.changeset/neat-windows-compete.md @@ -0,0 +1,5 @@ +--- +"@apollo/client": patch +--- + +Ignore top-level `data` values on subsequent chunks in incremental responses. diff --git a/.changeset/shaggy-brooms-talk.md b/.changeset/shaggy-brooms-talk.md new file mode 100644 index 00000000000..0ec18d995e1 --- /dev/null +++ b/.changeset/shaggy-brooms-talk.md @@ -0,0 +1,5 @@ +--- +"@apollo/client": patch +--- + +Fix the `Defer20220824Handler.SubsequentResult` type to match the `FormattedSubsequentIncrementalExecutionResult` type in `graphql@17.0.0-alpha.2`. 
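For orientation, a simplified sketch of the corrected shape described by that changeset; the real `SubsequentResult` declaration in the API-report diff above is generic over the handler's chunk and extension types, which are elided here:

```ts
// Sketch only — after the fix, subsequent chunks no longer declare top-level
// `data`/`errors`; deferred payloads arrive solely under `incremental`.
type SubsequentResultSketch<TIncremental> = {
  hasNext: boolean;
  incremental?: Array<TIncremental>;
  extensions?: Record<string, unknown>;
};
```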
diff --git a/.size-limits.json b/.size-limits.json index 6671f69ef70..e9766e87bd3 100644 --- a/.size-limits.json +++ b/.size-limits.json @@ -1,6 +1,6 @@ { - "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (CJS)": 44831, - "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production) (CJS)": 39452, + "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (CJS)": 44773, + "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production) (CJS)": 39426, "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\"": 33875, "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production)": 27756 } diff --git a/src/incremental/handlers/__tests__/defer20220824/defer.test.ts b/src/incremental/handlers/__tests__/defer20220824/defer.test.ts index 7ed32d8c991..ca7f340fba9 100644 --- a/src/incremental/handlers/__tests__/defer20220824/defer.test.ts +++ b/src/incremental/handlers/__tests__/defer20220824/defer.test.ts @@ -1037,3 +1037,78 @@ test("handles final chunk of { hasNext: false } correctly in usage with Apollo C }); await expect(observableStream).not.toEmitAnything(); }); + +// Servers that return a `data` property in subsequent payloads are technically +// invalid, but we still want to handle cases where the server misbehaves. +// +// See the following issue for more information: +// https://github.com/apollographql/apollo-client/issues/12976 +test("ignores `data` property added to subsequent chunks by misbehaving servers", async () => { + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDefer20220824(); + const client = new ApolloClient({ + link: httpLink, + cache: new InMemoryCache(), + incrementalHandler: new Defer20220824Handler(), + }); + + const query = gql` + query HeroNameQuery { + hero { + id + ... 
@defer { + name + } + } + } + `; + + const observableStream = new ObservableStream(client.watchQuery({ query })); + + enqueueInitialChunk({ + data: { hero: { __typename: "Hero", id: "1" } }, + hasNext: true, + }); + + await expect(observableStream).toEmitTypedValue({ + loading: true, + data: undefined, + dataState: "empty", + networkStatus: NetworkStatus.loading, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + loading: true, + data: markAsStreaming({ + hero: { + __typename: "Hero", + id: "1", + }, + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + enqueueSubsequentChunk({ + // @ts-expect-error simulate misbehaving server + data: null, + incremental: [{ data: { name: "Luke" }, path: ["hero"] }], + hasNext: false, + }); + + await expect(observableStream).toEmitTypedValue({ + loading: false, + data: { + hero: { + __typename: "Hero", + id: "1", + name: "Luke", + }, + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + partial: false, + }); +}); diff --git a/src/incremental/handlers/__tests__/graphql17Alpha9/defer.test.ts b/src/incremental/handlers/__tests__/graphql17Alpha9/defer.test.ts index 885a428f267..198df5f19dc 100644 --- a/src/incremental/handlers/__tests__/graphql17Alpha9/defer.test.ts +++ b/src/incremental/handlers/__tests__/graphql17Alpha9/defer.test.ts @@ -2629,3 +2629,79 @@ test("handles final chunk of { hasNext: false } correctly in usage with Apollo C }); await expect(observableStream).not.toEmitAnything(); }); + +// Servers that return a `data` property in subsequent payloads are technically +// invalid, but we still want to handle cases where the server misbehaves. +// +// See the following issue for more information: +// https://github.com/apollographql/apollo-client/issues/12976 +test("ignores `data` property added to subsequent chunks by misbehaving servers", async () => { + const stream = mockDeferStreamGraphQL17Alpha9(); + const client = new ApolloClient({ + link: stream.httpLink, + cache: new InMemoryCache(), + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + const query = gql` + query HeroNameQuery { + hero { + id + ... 
@defer { + name + } + } + } + `; + + const observableStream = new ObservableStream(client.watchQuery({ query })); + + stream.enqueueInitialChunk({ + data: { hero: { __typename: "Hero", id: "1" } }, + pending: [{ id: "0", path: ["hero"] }], + hasNext: true, + }); + + await expect(observableStream).toEmitTypedValue({ + loading: true, + data: undefined, + dataState: "empty", + networkStatus: NetworkStatus.loading, + partial: true, + }); + + await expect(observableStream).toEmitTypedValue({ + loading: true, + data: markAsStreaming({ + hero: { + __typename: "Hero", + id: "1", + }, + }), + dataState: "streaming", + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + stream.enqueueSubsequentChunk({ + // @ts-expect-error simulate misbehaving server + data: null, + incremental: [{ data: { name: "Luke" }, id: "0" }], + completed: [{ id: "0" }], + hasNext: false, + }); + + await expect(observableStream).toEmitTypedValue({ + loading: false, + data: { + hero: { + __typename: "Hero", + id: "1", + name: "Luke", + }, + }, + dataState: "complete", + networkStatus: NetworkStatus.ready, + partial: false, + }); +}); diff --git a/src/incremental/handlers/defer20220824.ts b/src/incremental/handlers/defer20220824.ts index 13a495f4ab6..1d4e3d1fbb3 100644 --- a/src/incremental/handlers/defer20220824.ts +++ b/src/incremental/handlers/defer20220824.ts @@ -33,8 +33,6 @@ export declare namespace Defer20220824Handler { }; export type SubsequentResult> = { - data?: TData | null | undefined; - errors?: ReadonlyArray; extensions?: Record; hasNext: boolean; incremental?: Array>; @@ -99,7 +97,6 @@ class DeferRequest> ): FormattedExecutionResult { this.hasNext = chunk.hasNext; this.data = cacheData; - this.merge(chunk); if (hasIncrementalChunks(chunk)) { for (const incremental of chunk.incremental) { @@ -135,6 +132,8 @@ class DeferRequest> arrayMerge ); } + } else { + this.merge(chunk); } const result: FormattedExecutionResult = { data: this.data }; @@ -178,7 +177,9 @@ export class Defer20220824Handler } }; if (this.isIncrementalResult(result)) { - push(result); + if ("errors" in result) { + push(result); + } if (hasIncrementalChunks(result)) { result.incremental.forEach(push); } diff --git a/src/incremental/handlers/graphql17Alpha9.ts b/src/incremental/handlers/graphql17Alpha9.ts index ee671c18504..fa08b8b4169 100644 --- a/src/incremental/handlers/graphql17Alpha9.ts +++ b/src/incremental/handlers/graphql17Alpha9.ts @@ -119,8 +119,6 @@ class IncrementalRequest } } - this.merge(chunk, "truncate"); - if (hasIncrementalChunks(chunk)) { for (const incremental of chunk.incremental) { const pending = this.pending.find(({ id }) => incremental.id === id); @@ -194,6 +192,8 @@ class IncrementalRequest arrayMerge ); } + } else { + this.merge(chunk, "truncate"); } if ("completed" in chunk && chunk.completed) { @@ -286,7 +286,7 @@ export class GraphQL17Alpha9Handler if (this.isIncrementalResult(result)) { push(new IncrementalRequest().handle(undefined, result)); - } else { + } else if ("errors" in result) { push(result); } diff --git a/src/link/context/__tests__/index.ts b/src/link/context/__tests__/index.ts index 6a5a7c8d13d..d5ff313c81b 100644 --- a/src/link/context/__tests__/index.ts +++ b/src/link/context/__tests__/index.ts @@ -1,3 +1,5 @@ +import assert from "node:assert"; + import { gql } from "graphql-tag"; import { Observable, of } from "rxjs"; @@ -268,6 +270,8 @@ test("can access the client from operation argument", async () => { const link = withContext.concat(mockLink); const stream = new 
ObservableStream(execute(link, { query }, { client })); - const { data } = await stream.takeNext(); - expect(data!.client).toBe(client); + const result = await stream.takeNext(); + + assert("data" in result); + expect(result.data!.client).toBe(client); }); From d11eb40aa41d90ac664705bac01158d58bf55e9b Mon Sep 17 00:00:00 2001 From: Jerel Miller Date: Mon, 27 Oct 2025 09:35:01 -0600 Subject: [PATCH 253/254] Add array support to `useFragment`, `useSuspenseFragment`, and `client.watchFragment` (#12971) Co-authored-by: Lenz Weber-Tronic Co-authored-by: jerelmiller <565661+jerelmiller@users.noreply.github.com> --- .api-reports/api-report-cache.api.md | 52 +- .api-reports/api-report-core.api.md | 26 +- .api-reports/api-report-react.api.md | 59 +- .api-reports/api-report-react_internal.api.md | 11 +- .api-reports/api-report-utilities.api.md | 2 +- .../api-report-utilities_internal.api.md | 7 +- .api-reports/api-report.api.md | 91 +- .changeset/famous-hats-explode.md | 5 + .changeset/large-ligers-prove.md | 13 + .changeset/old-singers-eat.md | 17 + .changeset/poor-knives-smile.md | 19 + .changeset/spicy-eels-switch.md | 5 + .size-limits.json | 8 +- config/jest.config.ts | 1 + docs/source/data/fragments.mdx | 118 +- knip.config.js | 2 + src/__tests__/__snapshots__/exports.ts.snap | 1 + .../cache.watchFragment/types.test.ts | 277 +++++ src/cache/core/cache.ts | 423 +++++-- src/cache/inmemory/inMemoryCache.ts | 12 +- src/core/ApolloClient.ts | 136 ++- .../client.watchFragment/arrays.test.ts | 1030 ++++++++++++++++ .../client.watchFragment/general.test.ts | 190 +++ .../getCurrentResult.test.ts | 624 ++++++++++ .../client.watchFragment/types.test.ts | 281 +++++ .../hooks/__tests__/useFragment.test.tsx | 112 +- .../__tests__/useFragment/arrays.test.tsx | 585 +++++++++ .../__tests__/useSuspenseFragment.test.tsx | 105 +- .../useSuspenseFragment/arrays.test.tsx | 1081 +++++++++++++++++ src/react/hooks/useFragment.ts | 214 ++-- src/react/hooks/useSuspenseFragment.ts | 124 +- src/react/internal/cache/FragmentReference.ts | 46 +- src/react/internal/cache/SuspenseCache.ts | 2 +- src/react/internal/cache/types.ts | 2 +- src/testing/matchers/index.d.ts | 9 + src/testing/matchers/index.ts | 4 + .../matchers/toHaveFragmentWatchesOn.ts | 99 ++ src/testing/matchers/toHaveNumWatches.ts | 35 + src/utilities/DeepPartial.ts | 4 +- .../internal/combineLatestBatched.ts | 80 ++ src/utilities/internal/index.ts | 1 + 41 files changed, 5504 insertions(+), 409 deletions(-) create mode 100644 .changeset/famous-hats-explode.md create mode 100644 .changeset/large-ligers-prove.md create mode 100644 .changeset/old-singers-eat.md create mode 100644 .changeset/poor-knives-smile.md create mode 100644 .changeset/spicy-eels-switch.md create mode 100644 src/cache/core/__tests__/cache.watchFragment/types.test.ts create mode 100644 src/core/__tests__/client.watchFragment/arrays.test.ts create mode 100644 src/core/__tests__/client.watchFragment/general.test.ts create mode 100644 src/core/__tests__/client.watchFragment/getCurrentResult.test.ts create mode 100644 src/core/__tests__/client.watchFragment/types.test.ts create mode 100644 src/react/hooks/__tests__/useFragment/arrays.test.tsx create mode 100644 src/react/hooks/__tests__/useSuspenseFragment/arrays.test.tsx create mode 100644 src/testing/matchers/toHaveFragmentWatchesOn.ts create mode 100644 src/testing/matchers/toHaveNumWatches.ts create mode 100644 src/utilities/internal/combineLatestBatched.ts diff --git a/.api-reports/api-report-cache.api.md 
b/.api-reports/api-report-cache.api.md index 3ff92332c84..cd57659d197 100644 --- a/.api-reports/api-report-cache.api.md +++ b/.api-reports/api-report-cache.api.md @@ -20,10 +20,12 @@ import { getApolloCacheMemoryInternals } from '@apollo/client/utilities/internal import type { GetDataState } from '@apollo/client'; import { getInMemoryCacheMemoryInternals } from '@apollo/client/utilities/internal'; import type { InlineFragmentNode } from 'graphql'; +import type { IsAny } from '@apollo/client/utilities/internal'; import { isReference } from '@apollo/client/utilities'; import type { NoInfer as NoInfer_2 } from '@apollo/client/utilities/internal'; import { Observable } from 'rxjs'; import type { OperationVariables } from '@apollo/client'; +import type { Prettify } from '@apollo/client/utilities/internal'; import { Reference } from '@apollo/client/utilities'; import type { SelectionSetNode } from 'graphql'; import type { StoreObject } from '@apollo/client/utilities'; @@ -39,20 +41,36 @@ type AllFieldsModifier> = Modifier extends Observable> { + getCurrentResult: () => ApolloCache.WatchFragmentResult; + } + export type WatchFragmentFromValue = StoreObject | Reference | FragmentType> | string | null; export interface WatchFragmentOptions { fragment: DocumentNode | TypedDocumentNode; fragmentName?: string; - from: StoreObject | Reference | FragmentType> | string; + from: ApolloCache.WatchFragmentFromValue | Array>; optimistic?: boolean; variables?: TVariables; } - export type WatchFragmentResult = ({ + export type WatchFragmentResult = true extends IsAny ? ({ + complete: true; + missing?: never; + } & GetDataState) | ({ + complete: false; + missing?: MissingTree; + } & GetDataState) : TData extends null | null[] ? Prettify<{ + complete: true; + missing?: never; + } & GetDataState> : Prettify<{ complete: true; missing?: never; - } & GetDataState) | ({ + } & GetDataState> | { complete: false; - missing: MissingTree; - } & GetDataState); + missing?: MissingTree; + data: TData extends Array ? 
Array | null> : DataValue.Partial; + dataState: "partial"; + }; } // @public (undocumented) @@ -77,6 +95,7 @@ export abstract class ApolloCache { lookupFragment(fragmentName: string): FragmentDefinitionNode | null; // (undocumented) modify = Record>(options: Cache_2.ModifyOptions): boolean; + protected onAfterBroadcast: (cb: () => void) => void; // (undocumented) abstract performTransaction(transaction: Transaction, optimisticId?: string | null): void; // (undocumented) @@ -106,7 +125,28 @@ export abstract class ApolloCache { updateQuery(options: Cache_2.UpdateQueryOptions, update: (data: Unmasked | null) => Unmasked | null | void): Unmasked | null; // (undocumented) abstract watch(watch: Cache_2.WatchOptions): () => void; - watchFragment(options: ApolloCache.WatchFragmentOptions): Observable>>; + // (undocumented) + watchFragment(options: ApolloCache.WatchFragmentOptions & { + from: Array>>; + }): ApolloCache.ObservableFragment>>; + // (undocumented) + watchFragment(options: ApolloCache.WatchFragmentOptions & { + from: Array; + }): ApolloCache.ObservableFragment>; + // (undocumented) + watchFragment(options: ApolloCache.WatchFragmentOptions & { + from: Array>; + }): ApolloCache.ObservableFragment | null>>; + // (undocumented) + watchFragment(options: ApolloCache.WatchFragmentOptions & { + from: null; + }): ApolloCache.ObservableFragment; + // (undocumented) + watchFragment(options: ApolloCache.WatchFragmentOptions & { + from: NonNullable>; + }): ApolloCache.ObservableFragment>; + // (undocumented) + watchFragment(options: ApolloCache.WatchFragmentOptions): ApolloCache.ObservableFragment | null>; // (undocumented) abstract write(write: Cache_2.WriteOptions): Reference | undefined; writeFragment({ data, fragment, fragmentName, variables, overwrite, id, broadcast, }: Cache_2.WriteFragmentOptions): Reference | undefined; diff --git a/.api-reports/api-report-core.api.md b/.api-reports/api-report-core.api.md index ab7effb8971..50b1aca96b3 100644 --- a/.api-reports/api-report-core.api.md +++ b/.api-reports/api-report-core.api.md @@ -224,6 +224,10 @@ export namespace ApolloClient { extensions?: Record; } // (undocumented) + export interface ObservableFragment extends Observable_2> { + getCurrentResult: () => ApolloClient.WatchFragmentResult; + } + // (undocumented) export interface Options { assumeImmutableResults?: boolean; cache: ApolloCache; @@ -295,7 +299,7 @@ export namespace ApolloClient { // (undocumented) export type WatchFragmentOptions = ApolloCache.WatchFragmentOptions; // (undocumented) - export type WatchFragmentResult = ApolloCache.WatchFragmentResult; + export type WatchFragmentResult = ApolloCache.WatchFragmentResult>; export type WatchQueryOptions = { fetchPolicy?: WatchQueryFetchPolicy; nextFetchPolicy?: WatchQueryFetchPolicy | ((this: WatchQueryOptions, currentFetchPolicy: WatchQueryFetchPolicy, context: NextFetchPolicyContext) => WatchQueryFetchPolicy); @@ -367,7 +371,23 @@ export class ApolloClient { subscribe(options: ApolloClient.SubscribeOptions): SubscriptionObservable>>; // (undocumented) version: string; - watchFragment(options: ApolloClient.WatchFragmentOptions): Observable_2>>; + watchFragment(options: ApolloClient.WatchFragmentOptions & { + from: Array>>; + }): ApolloClient.ObservableFragment>; + watchFragment(options: ApolloClient.WatchFragmentOptions & { + from: Array; + }): ApolloClient.ObservableFragment>; + // (undocumented) + watchFragment(options: ApolloClient.WatchFragmentOptions & { + from: Array>; + }): ApolloClient.ObservableFragment>; + 
watchFragment(options: ApolloClient.WatchFragmentOptions & { + from: null; + }): ApolloClient.ObservableFragment; + watchFragment(options: ApolloClient.WatchFragmentOptions & { + from: NonNullable>; + }): ApolloClient.ObservableFragment; + watchFragment(options: ApolloClient.WatchFragmentOptions): ApolloClient.ObservableFragment; watchQuery(options: ApolloClient.WatchQueryOptions): ObservableQuery; writeFragment(options: ApolloClient.WriteFragmentOptions): Reference_2 | undefined; writeQuery(options: ApolloClient.WriteQueryOptions): Reference_2 | undefined; @@ -1140,7 +1160,7 @@ export type WatchQueryOptions = StoreObject_2 | Reference_2 | FragmentType> | string | null; - // @public (undocumented) export function getApolloContext(): ReactTypes.Context; @@ -388,6 +381,21 @@ export namespace useBackgroundQuery { // @public @deprecated (undocumented) export type UseBackgroundQueryResult = useBackgroundQuery.Result; +// @public +export function useFragment(options: useFragment.Options & { + from: Array>>; +}): useFragment.Result>; + +// @public +export function useFragment(options: useFragment.Options & { + from: Array; +}): useFragment.Result>; + +// @public +export function useFragment(options: useFragment.Options & { + from: Array>; +}): useFragment.Result>; + // @public export function useFragment(options: useFragment.Options): useFragment.Result; @@ -421,12 +429,13 @@ export namespace useFragment { export namespace DocumentationTypes { export function useFragment({ fragment, from, fragmentName, variables, optimistic, client, }: useFragment.Options): useFragment.Result; } + export type FromValue = ApolloCache_2.WatchFragmentFromValue; // (undocumented) export interface Options { client?: ApolloClient; fragment: DocumentNode_2 | TypedDocumentNode_2; fragmentName?: string; - from: StoreObject | Reference | FragmentType> | string | null; + from: useFragment.FromValue | Array>; optimistic?: boolean; variables?: NoInfer_2; } @@ -434,10 +443,12 @@ export namespace useFragment { export type Result = ({ complete: true; missing?: never; - } & GetDataState, "complete">) | ({ + } & GetDataState, "complete">) | { complete: false; missing?: MissingTree; - } & GetDataState, "partial">); + data: TData extends Array ? 
Array | null> : DataValue.Partial; + dataState: "partial"; + }; } // @public @deprecated (undocumented) @@ -912,9 +923,24 @@ export namespace useSubscription { } } +// @public +export function useSuspenseFragment(options: useSuspenseFragment.Options & { + from: Array>>; +}): useSuspenseFragment.Result>; + +// @public +export function useSuspenseFragment(options: useSuspenseFragment.Options & { + from: Array; +}): useSuspenseFragment.Result>; + +// @public +export function useSuspenseFragment(options: useSuspenseFragment.Options & { + from: Array>; +}): useSuspenseFragment.Result>; + // @public export function useSuspenseFragment(options: useSuspenseFragment.Options & { - from: NonNullable>; + from: NonNullable>; }): useSuspenseFragment.Result; // @public @@ -924,7 +950,7 @@ export function useSuspenseFragment(options: useSuspenseFragment.Options & { - from: From; + from: useSuspenseFragment.FromValue; }): useSuspenseFragment.Result; // @public @@ -939,7 +965,7 @@ export namespace useSuspenseFragment { export type Options = { fragment: DocumentNode_2 | TypedDocumentNode_2; fragmentName?: string; - from: From; + from: useSuspenseFragment.FromValue | Array>; optimistic?: boolean; client?: ApolloClient; }; @@ -966,6 +992,7 @@ export namespace useSuspenseFragment { export namespace DocumentationTypes { export function useSuspenseFragment(options: useSuspenseFragment.Options): useSuspenseFragment.Result; } + export type FromValue = ApolloCache_2.WatchFragmentFromValue; // (undocumented) export type Options = Base.Options & VariablesOption>; // (undocumented) @@ -1088,10 +1115,6 @@ export namespace useSuspenseQuery { // @public @deprecated (undocumented) export type UseSuspenseQueryResult = useSuspenseQuery.Result; -// Warnings were encountered during analysis: -// -// src/react/hooks/useSuspenseFragment.ts:111:5 - (ae-forgotten-export) The symbol "From" needs to be exported by the entry point index.d.ts - // (No @packageDocumentation comment for this package) ``` diff --git a/.api-reports/api-report-react_internal.api.md b/.api-reports/api-report-react_internal.api.md index a8652491621..f327851b3a6 100644 --- a/.api-reports/api-report-react_internal.api.md +++ b/.api-reports/api-report-react_internal.api.md @@ -11,7 +11,6 @@ import type { DocumentNode } from 'graphql'; import type { InternalTypes } from '@apollo/client/react'; import type { MaybeMasked } from '@apollo/client/masking'; import type { MaybeMasked as MaybeMasked_2 } from '@apollo/client'; -import type { Observable } from 'rxjs'; import type { ObservableQuery } from '@apollo/client'; import type { OperationVariables } from '@apollo/client'; @@ -35,9 +34,9 @@ export type FetchMoreFunction = { // Warning: (ae-forgotten-export) The symbol "FragmentReferenceOptions" needs to be exported by the entry point index.d.ts constructor(client: ApolloClient, watchFragmentOptions: ApolloClient.WatchFragmentOptions & { - from: string; + from: string | null | Array; }, options: FragmentReferenceOptions); // (undocumented) readonly key: FragmentKey; @@ -59,7 +58,7 @@ class FragmentReference>): () => void; // (undocumented) - readonly observable: Observable>; + readonly observable: ApolloClient.ObservableFragment; // Warning: (ae-forgotten-export) The symbol "FragmentRefPromise" needs to be exported by the entry point index.d.ts // // (undocumented) @@ -203,7 +202,7 @@ class SuspenseCache { // // (undocumented) getFragmentRef(cacheKey: FragmentCacheKey, client: ApolloClient, options: ApolloClient.WatchFragmentOptions & { - from: string; + from: string 
| null | Array; }): FragmentReference; // (undocumented) getQueryRef["dataState"] = DataState["dataState"]>(cacheKey: CacheKey, createObservable: () => ObservableQuery): InternalQueryReference; diff --git a/.api-reports/api-report-utilities.api.md b/.api-reports/api-report-utilities.api.md index 7042954b4a1..df5116df8d9 100644 --- a/.api-reports/api-report-utilities.api.md +++ b/.api-reports/api-report-utilities.api.md @@ -66,7 +66,7 @@ export function concatPagination(keyArgs?: KeyArgs): FieldPolic // Warning: (ae-forgotten-export) The symbol "DeepPartialObject" needs to be exported by the entry point index.d.ts // // @public -export type DeepPartial = T extends DeepPartialPrimitive ? T : T extends Map ? DeepPartialMap : T extends ReadonlyMap ? DeepPartialReadonlyMap : T extends Set ? DeepPartialSet : T extends ReadonlySet ? DeepPartialReadonlySet : T extends (...args: any[]) => unknown ? T | undefined : T extends object ? T extends (ReadonlyArray) ? TItem[] extends (T) ? readonly TItem[] extends T ? ReadonlyArray> : Array> : DeepPartialObject : DeepPartialObject : unknown; +export type DeepPartial = T extends DeepPartialPrimitive ? T : T extends Map ? DeepPartialMap : T extends ReadonlyMap ? DeepPartialReadonlyMap : T extends Set ? DeepPartialSet : T extends ReadonlySet ? DeepPartialReadonlySet : T extends (...args: any[]) => unknown ? T | undefined : T extends object ? T extends (ReadonlyArray) ? TItem[] extends (T) ? readonly TItem[] extends T ? ReadonlyArray> : Array> : DeepPartialObject : DeepPartialObject : unknown; // @public (undocumented) type DeepPartialMap = {} & Map, DeepPartial>; diff --git a/.api-reports/api-report-utilities_internal.api.md b/.api-reports/api-report-utilities_internal.api.md index 790b197514d..109cfc7d8c9 100644 --- a/.api-reports/api-report-utilities_internal.api.md +++ b/.api-reports/api-report-utilities_internal.api.md @@ -18,7 +18,7 @@ import type { HKT } from '@apollo/client/utilities'; import type { InlineFragmentNode } from 'graphql'; import type { MaybeMasked } from '@apollo/client'; import type { NetworkStatus } from '@apollo/client'; -import type { Observable } from 'rxjs'; +import { Observable } from 'rxjs'; import type { ObservableQuery } from '@apollo/client'; import type { Observer } from 'rxjs'; import type { OperationDefinitionNode } from 'graphql'; @@ -74,6 +74,11 @@ export const checkDocument: (doc: DocumentNode, expectedType?: OperationTypeNode // @internal @deprecated export function cloneDeep(value: T): T; +// @public +export function combineLatestBatched(observables: Array & { + dirty?: boolean; +}>): Observable; + // Warning: (ae-forgotten-export) The symbol "TupleToIntersection" needs to be exported by the entry point index.d.ts // // @internal @deprecated diff --git a/.api-reports/api-report.api.md b/.api-reports/api-report.api.md index ddcaa569a04..f7c5b5d5d1f 100644 --- a/.api-reports/api-report.api.md +++ b/.api-reports/api-report.api.md @@ -37,21 +37,39 @@ type AllFieldsModifier> = Modifier extends Observable> { + getCurrentResult: () => ApolloCache.WatchFragmentResult; + } + // Warning: (ae-forgotten-export) The symbol "NoInfer_2" needs to be exported by the entry point index.d.ts + export type WatchFragmentFromValue = StoreObject | Reference | FragmentType> | string | null; export interface WatchFragmentOptions { fragment: DocumentNode | TypedDocumentNode; fragmentName?: string; - // Warning: (ae-forgotten-export) The symbol "NoInfer_2" needs to be exported by the entry point index.d.ts - from: StoreObject | Reference | 
FragmentType> | string; + from: ApolloCache.WatchFragmentFromValue | Array>; optimistic?: boolean; variables?: TVariables; } - export type WatchFragmentResult = ({ + // Warning: (ae-forgotten-export) The symbol "IsAny" needs to be exported by the entry point index.d.ts + // Warning: (ae-forgotten-export) The symbol "Prettify" needs to be exported by the entry point index.d.ts + export type WatchFragmentResult = true extends IsAny ? ({ + complete: true; + missing?: never; + } & GetDataState) | ({ + complete: false; + missing?: MissingTree; + } & GetDataState) : TData extends null | null[] ? Prettify<{ complete: true; missing?: never; - } & GetDataState) | ({ + } & GetDataState> : Prettify<{ + complete: true; + missing?: never; + } & GetDataState> | { complete: false; - missing: MissingTree; - } & GetDataState); + missing?: MissingTree; + data: TData extends Array ? Array | null> : DataValue.Partial; + dataState: "partial"; + }; } // @public (undocumented) @@ -78,6 +96,7 @@ export abstract class ApolloCache { lookupFragment(fragmentName: string): FragmentDefinitionNode | null; // (undocumented) modify = Record>(options: Cache_2.ModifyOptions): boolean; + protected onAfterBroadcast: (cb: () => void) => void; // (undocumented) abstract performTransaction(transaction: Transaction, optimisticId?: string | null): void; // (undocumented) @@ -107,7 +126,28 @@ export abstract class ApolloCache { updateQuery(options: Cache_2.UpdateQueryOptions, update: (data: Unmasked | null) => Unmasked | null | void): Unmasked | null; // (undocumented) abstract watch(watch: Cache_2.WatchOptions): () => void; - watchFragment(options: ApolloCache.WatchFragmentOptions): Observable>>; + // (undocumented) + watchFragment(options: ApolloCache.WatchFragmentOptions & { + from: Array>>; + }): ApolloCache.ObservableFragment>>; + // (undocumented) + watchFragment(options: ApolloCache.WatchFragmentOptions & { + from: Array; + }): ApolloCache.ObservableFragment>; + // (undocumented) + watchFragment(options: ApolloCache.WatchFragmentOptions & { + from: Array>; + }): ApolloCache.ObservableFragment | null>>; + // (undocumented) + watchFragment(options: ApolloCache.WatchFragmentOptions & { + from: null; + }): ApolloCache.ObservableFragment; + // (undocumented) + watchFragment(options: ApolloCache.WatchFragmentOptions & { + from: NonNullable>; + }): ApolloCache.ObservableFragment>; + // (undocumented) + watchFragment(options: ApolloCache.WatchFragmentOptions): ApolloCache.ObservableFragment | null>; // (undocumented) abstract write(write: Cache_2.WriteOptions): Reference | undefined; writeFragment({ data, fragment, fragmentName, variables, overwrite, id, broadcast, }: Cache_2.WriteFragmentOptions): Reference | undefined; @@ -234,6 +274,10 @@ export namespace ApolloClient { extensions?: Record; } // (undocumented) + export interface ObservableFragment extends Observable> { + getCurrentResult: () => ApolloClient.WatchFragmentResult; + } + // (undocumented) export interface Options { assumeImmutableResults?: boolean; cache: ApolloCache; @@ -310,7 +354,7 @@ export namespace ApolloClient { // (undocumented) export type WatchFragmentOptions = ApolloCache.WatchFragmentOptions; // (undocumented) - export type WatchFragmentResult = ApolloCache.WatchFragmentResult; + export type WatchFragmentResult = ApolloCache.WatchFragmentResult>; export type WatchQueryOptions = { fetchPolicy?: WatchQueryFetchPolicy; nextFetchPolicy?: WatchQueryFetchPolicy | ((this: WatchQueryOptions, currentFetchPolicy: WatchQueryFetchPolicy, context: 
NextFetchPolicyContext) => WatchQueryFetchPolicy); @@ -383,7 +427,23 @@ export class ApolloClient { subscribe(options: ApolloClient.SubscribeOptions): SubscriptionObservable>>; // (undocumented) version: string; - watchFragment(options: ApolloClient.WatchFragmentOptions): Observable>>; + watchFragment(options: ApolloClient.WatchFragmentOptions & { + from: Array>>; + }): ApolloClient.ObservableFragment>; + watchFragment(options: ApolloClient.WatchFragmentOptions & { + from: Array; + }): ApolloClient.ObservableFragment>; + // (undocumented) + watchFragment(options: ApolloClient.WatchFragmentOptions & { + from: Array>; + }): ApolloClient.ObservableFragment>; + watchFragment(options: ApolloClient.WatchFragmentOptions & { + from: null; + }): ApolloClient.ObservableFragment; + watchFragment(options: ApolloClient.WatchFragmentOptions & { + from: NonNullable>; + }): ApolloClient.ObservableFragment; + watchFragment(options: ApolloClient.WatchFragmentOptions): ApolloClient.ObservableFragment; watchQuery(options: ApolloClient.WatchQueryOptions): ObservableQuery; writeFragment(options: ApolloClient.WriteFragmentOptions): Reference | undefined; writeQuery(options: ApolloClient.WriteQueryOptions): Reference | undefined; @@ -847,7 +907,7 @@ export namespace DataValue { // Warning: (ae-forgotten-export) The symbol "DeepPartialObject" needs to be exported by the entry point index.d.ts // // @public -type DeepPartial = T extends DeepPartialPrimitive ? T : T extends Map ? DeepPartialMap : T extends ReadonlyMap ? DeepPartialReadonlyMap : T extends Set ? DeepPartialSet : T extends ReadonlySet ? DeepPartialReadonlySet : T extends (...args: any[]) => unknown ? T | undefined : T extends object ? T extends (ReadonlyArray) ? TItem[] extends (T) ? readonly TItem[] extends T ? ReadonlyArray> : Array> : DeepPartialObject : DeepPartialObject : unknown; +type DeepPartial = T extends DeepPartialPrimitive ? T : T extends Map ? DeepPartialMap : T extends ReadonlyMap ? DeepPartialReadonlyMap : T extends Set ? DeepPartialSet : T extends ReadonlySet ? DeepPartialReadonlySet : T extends (...args: any[]) => unknown ? T | undefined : T extends object ? T extends (ReadonlyArray) ? TItem[] extends (T) ? readonly TItem[] extends T ? ReadonlyArray> : Array> : DeepPartialObject : DeepPartialObject : unknown; // Warning: (ae-forgotten-export) The symbol "DeepPartial" needs to be exported by the entry point index.d.ts // @@ -2149,6 +2209,11 @@ namespace PreserveTypes { type Unmasked = TData; } +// @internal @deprecated (undocumented) +type Prettify = { + [K in keyof T]: T[K]; +} & {}; + // @internal @deprecated (undocumented) type Primitive = null | undefined | string | number | boolean | symbol | bigint; @@ -2372,8 +2437,6 @@ type RefetchQueriesIncludeShorthand = "all" | "active"; // @public @deprecated (undocumented) export type RefetchQueriesOptions = ApolloClient.RefetchQueriesOptions; -// Warning: (ae-forgotten-export) The symbol "IsAny" needs to be exported by the entry point index.d.ts -// // @public (undocumented) export type RefetchQueriesPromiseResults = IsAny extends true ? any[] : TResult extends boolean ? ApolloClient.QueryResult[] : TResult extends PromiseLike ? 
U[] : TResult[]; @@ -2718,13 +2781,13 @@ interface WriteContext extends ReadMergeModifyContext { // Warnings were encountered during analysis: // -// src/cache/core/cache.ts:94:9 - (ae-forgotten-export) The symbol "MissingTree" needs to be exported by the entry point index.d.ts +// src/cache/core/cache.ts:123:11 - (ae-forgotten-export) The symbol "MissingTree" needs to be exported by the entry point index.d.ts // src/cache/inmemory/policies.ts:98:3 - (ae-forgotten-export) The symbol "FragmentMap" needs to be exported by the entry point index.d.ts // src/cache/inmemory/policies.ts:167:3 - (ae-forgotten-export) The symbol "KeySpecifier" needs to be exported by the entry point index.d.ts // src/cache/inmemory/policies.ts:167:3 - (ae-forgotten-export) The symbol "KeyArgsFunction" needs to be exported by the entry point index.d.ts // src/cache/inmemory/types.ts:134:3 - (ae-forgotten-export) The symbol "KeyFieldsFunction" needs to be exported by the entry point index.d.ts // src/core/ApolloClient.ts:168:5 - (ae-forgotten-export) The symbol "IgnoreModifier" needs to be exported by the entry point index.d.ts -// src/core/ApolloClient.ts:362:5 - (ae-forgotten-export) The symbol "NextFetchPolicyContext" needs to be exported by the entry point index.d.ts +// src/core/ApolloClient.ts:370:5 - (ae-forgotten-export) The symbol "NextFetchPolicyContext" needs to be exported by the entry point index.d.ts // src/core/ObservableQuery.ts:368:5 - (ae-forgotten-export) The symbol "QueryManager" needs to be exported by the entry point index.d.ts // src/core/QueryManager.ts:180:5 - (ae-forgotten-export) The symbol "MutationStoreValue" needs to be exported by the entry point index.d.ts // src/local-state/LocalState.ts:149:5 - (ae-forgotten-export) The symbol "LocalState" needs to be exported by the entry point index.d.ts diff --git a/.changeset/famous-hats-explode.md b/.changeset/famous-hats-explode.md new file mode 100644 index 00000000000..6a924e646d2 --- /dev/null +++ b/.changeset/famous-hats-explode.md @@ -0,0 +1,5 @@ +--- +"@apollo/client": patch +--- + +Deduplicate watches created by `useFragment`, `client.watchFragment`, and `cache.watchFragment` that contain the same fragment, variables, and identifier. This should improve performance in situations where a `useFragment` or a `client.watchFragment` is used to watch the same object in multiple places of an application. diff --git a/.changeset/large-ligers-prove.md b/.changeset/large-ligers-prove.md new file mode 100644 index 00000000000..dcc9d9d9e1d --- /dev/null +++ b/.changeset/large-ligers-prove.md @@ -0,0 +1,13 @@ +--- +"@apollo/client": minor +--- + +Add support for `from: null` in `client.watchFragment` and `cache.watchFragment`. When `from` is `null`, the emitted result is: + +```ts +{ + data: null, + dataState: "complete", + complete: true, +} +``` diff --git a/.changeset/old-singers-eat.md b/.changeset/old-singers-eat.md new file mode 100644 index 00000000000..84c8ffa53e3 --- /dev/null +++ b/.changeset/old-singers-eat.md @@ -0,0 +1,17 @@ +--- +"@apollo/client": minor +--- + +Add support for arrays with `useFragment`, `useSuspenseFragment`, and `client.watchFragment`. This allows the ability to use a fragment to watch multiple entities in the cache. Passing an array to `from` will return `data` as an array where each array index corresponds to the index in the `from` array. 
+ +```ts +function MyComponent() { + const result = useFragment({ + fragment, + from: [item1, item2, item3] + }); + + // `data` is an array with 3 items + console.log(result); // { data: [{...}, {...}, {...}], dataState: "complete", complete: true } +} +``` diff --git a/.changeset/poor-knives-smile.md b/.changeset/poor-knives-smile.md new file mode 100644 index 00000000000..9294c18f6b9 --- /dev/null +++ b/.changeset/poor-knives-smile.md @@ -0,0 +1,19 @@ +--- +"@apollo/client": minor +--- + +Add a `getCurrentResult` function to the observable returned by `client.watchFragment` and `cache.watchFragment` that returns the current value for the watched fragment. + +```ts +const observable = client.watchFragment({ + fragment, + from: { __typename: 'Item', id: 1 } +}) + +console.log(observable.getCurrentResult()); +// { +// data: {...}, +// dataState: "complete", +// complete: true, +// } +``` diff --git a/.changeset/spicy-eels-switch.md b/.changeset/spicy-eels-switch.md new file mode 100644 index 00000000000..b65d6362a85 --- /dev/null +++ b/.changeset/spicy-eels-switch.md @@ -0,0 +1,5 @@ +--- +"@apollo/client": patch +--- + +`DeepPartial>` now returns `Array>` instead of `Array>`. diff --git a/.size-limits.json b/.size-limits.json index e9766e87bd3..85e88029cd1 100644 --- a/.size-limits.json +++ b/.size-limits.json @@ -1,6 +1,6 @@ { - "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (CJS)": 44773, - "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production) (CJS)": 39426, - "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\"": 33875, - "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production)": 27756 + "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (CJS)": 45497, + "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production) (CJS)": 40156, + "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\"": 34540, + "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production)": 28414 } diff --git a/config/jest.config.ts b/config/jest.config.ts index 4cd48b1fd7c..8fded0c2cd4 100644 --- a/config/jest.config.ts +++ b/config/jest.config.ts @@ -48,6 +48,7 @@ const react17TestFileIgnoreList = [ // React 17 "src/testing/experimental/__tests__/createTestSchema.test.tsx", "src/react/hooks/__tests__/useSuspenseFragment.test.tsx", + "src/react/hooks/__tests__/useSuspenseFragment/*", "src/react/hooks/__tests__/useSuspenseQuery.test.tsx", "src/react/hooks/__tests__/useSuspenseQuery/*", "src/react/hooks/__tests__/useBackgroundQuery.test.tsx", diff --git a/docs/source/data/fragments.mdx b/docs/source/data/fragments.mdx index 8a58f2959cf..7f40c6518da 100644 --- a/docs/source/data/fragments.mdx +++ b/docs/source/data/fragments.mdx @@ -518,7 +518,7 @@ function List() { } ``` - + Instead of interpolating fragments within each query document, you can use Apollo Client's `createFragmentRegistry` method to pre-register named fragments with `InMemoryCache`. This allows Apollo Client to include the @@ -526,7 +526,7 @@ function List() { before the request is sent. For more information, see [Registering named fragments using `createFragmentRegistry`](#registering-named-fragments-using-createfragmentregistry). - + We can then use `useFragment` from within the `` component to create a live binding for each item by providing the `fragment` document, `fragmentName` and object reference via `from`. 
@@ -564,10 +564,10 @@ function Item(props) { - + You may omit the `fragmentName` option when your fragment definition only includes a single fragment. - + You may instead prefer to pass the whole `item` as a prop to the `Item` component. This makes the `from` option more concise. @@ -576,7 +576,7 @@ You may instead prefer to pass the whole `item` as a prop to the `Item` componen ```tsx function Item(props: { item: { __typename: "Item"; id: number } }) { const { complete, data } = useFragment({ - fragment: ItemFragment, + fragment: ITEM_FRAGMENT, fragmentName: "ItemFragment", from: props.item, }); @@ -608,6 +608,114 @@ function Item(props) { See the [API reference](../api/react/useFragment) for more details on the supported options. + + +### Working with arrays + + + +Sometimes your component might use a fragment to select fields for an array of items that are received from props. You can use the `useFragment` hook to watch for changes on each array item by providing the array to the `from` option. + +When you provide an array to the `from` option, the `data` property returned from `useFragment` is an array where each item corresponds to an item with the same index in the `from` option. If all of the items returned in `data` are complete, the `complete` property is set to `true` and the `dataState` property is set to `"complete"`. If at least one item in the array is incomplete, the `complete` property is set to `false` and the `dataState` property is set to `"partial"`. + + + +```tsx +function Items(props: { items: Array<{ __typename: "Item"; id: number }> }) { + const { data, complete } = useFragment({ + fragment: ITEM_FRAGMENT, + fragmentName: "ItemFragment", + from: props.items, + }); + + if (!complete) { + return null; + } + + return ( +
+    <ol>
+      {data.map((item) => (
+        <li key={item.id}>{item.text}</li>
+      ))}
+    </ol>
+ ); +} +``` + +```js +function Items(props) { + const { data, complete } = useFragment({ + fragment: ITEM_FRAGMENT, + fragmentName: "ItemFragment", + from: props.items, + }); + + if (!complete) { + return null; + } + + return ( +
+    <ol>
+      {data.map((item) => (
+        <li key={item.id}>{item.text}</li>
+      ))}
+    </ol>
+ ); +} +``` + +
+ + + +If the array provided to the `from` option is an empty array, the returned `data` is an empty array with the `complete` property set to `true` and `dataState` property set to `"complete"`. + + + +#### Handling `null` values + +Depending on the GraphQL schema, it's possible the array might contain `null` values. When `useFragment` is provided an array that contains `null` values to the `from` property, `useFragment` returns those items as `null` in the `data` property and treats these items as complete. This means if all non-`null` items in the array are also complete, the whole result is complete. + +```ts +const { data, dataState, complete } = useFragment({ + fragment: ITEM_FRAGMENT, + fragmentName: "ItemFragment", + from: [{ __typename: "Item", id: 1 }, { __typename: "Item", id: 2 }, null], +}); + +console.log({ data, dataState, complete }); +// { +// data: [ +// { __typename: "Item", id: 1, text: "..." }, +// { __typename: "Item", id: 2, text: "..." }, +// null +// ] +// dataState: "complete", +// complete: true +// } +``` + + + +If the `from` array contains `null` values for every item, the result returned from `useFragment` contains all `null` values, the `complete` property is set to `true`, and the `dataState` property is set to `"complete"`. + +```ts +const { data, dataState, complete } = useFragment({ + fragment: ITEM_FRAGMENT, + fragmentName: "ItemFragment", + from: [null, null, null], +}); + +console.log({ data, dataState, complete }); +// { +// data: [null, null, null], +// dataState: "complete", +// complete: true +// } +``` + + + ## `useSuspenseFragment` For those that have integrated with React [Suspense](https://react.dev/reference/react/Suspense), `useSuspenseFragment` is available as a drop-in replacement for `useFragment`. `useSuspenseFragment` works identically to `useFragment` but will suspend while `data` is incomplete. 
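As a hedged companion to that paragraph, a minimal sketch of how the array support added in this patch combines with `useSuspenseFragment` (reusing the `ITEM_FRAGMENT` from the examples above; the hook suspends while data for the listed items is incomplete):

```tsx
import { useSuspenseFragment } from "@apollo/client/react";

function Items(props: { items: Array<{ __typename: "Item"; id: number }> }) {
  // Suspends until the cache can fulfill the fragment for every entry in
  // `from`, so `data` is a fully resolved array by the time we render.
  const { data } = useSuspenseFragment({
    fragment: ITEM_FRAGMENT,
    fragmentName: "ItemFragment",
    from: props.items,
  });

  return (
    <ol>
      {data.map((item) => (
        <li key={item.id}>{item.text}</li>
      ))}
    </ol>
  );
}
```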
diff --git a/knip.config.js b/knip.config.js index edecf0d3204..42b4feb18dd 100644 --- a/knip.config.js +++ b/knip.config.js @@ -47,6 +47,8 @@ const config = { "src/config/jest/resolver.ts", "config/listImports.ts", "scripts/codemods/**/__testfixtures__/**/*", + // Exports `KeyOptions` used in `matchers/index.d.ts`, but can't pick it up + "src/testing/matchers/toHaveFragmentWatchesOn.ts", ], ignoreBinaries: ["jq"], ignoreDependencies: [ diff --git a/src/__tests__/__snapshots__/exports.ts.snap b/src/__tests__/__snapshots__/exports.ts.snap index 2dd66642204..82141a6b4b0 100644 --- a/src/__tests__/__snapshots__/exports.ts.snap +++ b/src/__tests__/__snapshots__/exports.ts.snap @@ -431,6 +431,7 @@ Array [ "canonicalStringify", "checkDocument", "cloneDeep", + "combineLatestBatched", "compact", "createFragmentMap", "createFulfilledPromise", diff --git a/src/cache/core/__tests__/cache.watchFragment/types.test.ts b/src/cache/core/__tests__/cache.watchFragment/types.test.ts new file mode 100644 index 00000000000..7ee8b472a27 --- /dev/null +++ b/src/cache/core/__tests__/cache.watchFragment/types.test.ts @@ -0,0 +1,277 @@ +import { expectTypeOf } from "expect-type"; + +import type { + DataValue, + Reference, + StoreObject, + TypedDocumentNode, +} from "@apollo/client"; +import { InMemoryCache } from "@apollo/client"; +import type { ApolloCache, MissingTree } from "@apollo/client/cache"; + +describe.skip("type tests", () => { + interface Item { + __typename: "Item"; + id: number; + text: string; + } + + const cache = new InMemoryCache(); + let fragment!: TypedDocumentNode>; + + test("from: null -> null", () => { + const observable = cache.watchFragment({ fragment, from: null }); + const result = observable.getCurrentResult(); + + expectTypeOf(observable).toEqualTypeOf< + ApolloCache.ObservableFragment + >(); + expectTypeOf(result).toEqualTypeOf<{ + data: null; + dataState: "complete"; + complete: true; + missing?: never; + }>(); + }); + + test("from: StoreObject -> TData", () => { + const observable = cache.watchFragment({ + fragment, + from: { __typename: "Item", id: 1 }, + }); + const result = observable.getCurrentResult(); + + expectTypeOf(observable).toEqualTypeOf< + ApolloCache.ObservableFragment + >(); + expectTypeOf(result).toEqualTypeOf< + | { + data: Item; + dataState: "complete"; + complete: true; + missing?: never; + } + | { + data: DataValue.Partial; + dataState: "partial"; + complete: false; + missing?: MissingTree; + } + >(); + }); + + test("from: string -> TData", () => { + const observable = cache.watchFragment({ fragment, from: "Item:1" }); + const result = observable.getCurrentResult(); + + expectTypeOf(observable).toEqualTypeOf< + ApolloCache.ObservableFragment + >(); + expectTypeOf(result).toEqualTypeOf< + | { + data: Item; + dataState: "complete"; + complete: true; + missing?: never; + } + | { + data: DataValue.Partial; + dataState: "partial"; + complete: false; + missing?: MissingTree; + } + >(); + }); + + test("from: Reference -> TData", () => { + const observable = cache.watchFragment({ + fragment, + from: { __ref: "Item:1" }, + }); + const result = observable.getCurrentResult(); + + expectTypeOf(observable).toEqualTypeOf< + ApolloCache.ObservableFragment + >(); + expectTypeOf(result).toEqualTypeOf< + | { + data: Item; + dataState: "complete"; + complete: true; + missing?: never; + } + | { + data: DataValue.Partial; + dataState: "partial"; + complete: false; + missing?: MissingTree; + } + >(); + }); + + test("from: StoreObject | null -> TData | null", () => { + const observable 
= cache.watchFragment({ + fragment, + from: { __typename: "Item", id: 1 } as StoreObject | null, + }); + const result = observable.getCurrentResult(); + + expectTypeOf(observable).toEqualTypeOf< + ApolloCache.ObservableFragment + >(); + expectTypeOf(result).toEqualTypeOf< + | { + data: null; + dataState: "complete"; + complete: true; + missing?: never; + } + | { + data: Item; + dataState: "complete"; + complete: true; + missing?: never; + } + | { + data: DataValue.Partial; + dataState: "partial"; + complete: false; + missing?: MissingTree; + } + >(); + }); + + test("from: string | null -> TData | null", () => { + const observable = cache.watchFragment({ + fragment, + from: "Item:1" as string | null, + }); + const result = observable.getCurrentResult(); + + expectTypeOf(observable).toEqualTypeOf< + ApolloCache.ObservableFragment + >(); + expectTypeOf(result).toEqualTypeOf< + | { + data: null; + dataState: "complete"; + complete: true; + missing?: never; + } + | { + data: Item; + dataState: "complete"; + complete: true; + missing?: never; + } + | { + data: DataValue.Partial; + dataState: "partial"; + complete: false; + missing?: MissingTree; + } + >(); + }); + + test("from: Reference | null -> TData | null", () => { + const observable = cache.watchFragment({ + fragment, + from: { __ref: "Item:1" } as Reference | null, + }); + const result = observable.getCurrentResult(); + + expectTypeOf(observable).toEqualTypeOf< + ApolloCache.ObservableFragment + >(); + expectTypeOf(result).toEqualTypeOf< + | { + data: null; + dataState: "complete"; + complete: true; + missing?: never; + } + | { + data: Item; + dataState: "complete"; + complete: true; + missing?: never; + } + | { + data: DataValue.Partial; + dataState: "partial"; + complete: false; + missing?: MissingTree; + } + >(); + }); + + test("from: Array -> Array", () => { + const observable = cache.watchFragment({ + fragment, + from: [null], + }); + const result = observable.getCurrentResult(); + + expectTypeOf(observable).toEqualTypeOf< + ApolloCache.ObservableFragment> + >(); + expectTypeOf(result).toEqualTypeOf<{ + data: Array; + dataState: "complete"; + complete: true; + missing?: never; + }>(); + }); + + test("from: Array -> Array", () => { + const observable = cache.watchFragment({ + fragment, + from: [{ __typename: "Item", id: 1 }, "Item:1", { __ref: "Item:1" }], + }); + const result = observable.getCurrentResult(); + + expectTypeOf(observable).toEqualTypeOf< + ApolloCache.ObservableFragment> + >(); + expectTypeOf(result).toEqualTypeOf< + | { + data: Array; + dataState: "complete"; + complete: true; + missing?: never; + } + | { + data: Array | null>; + dataState: "partial"; + complete: false; + missing?: MissingTree; + } + >(); + }); + + test("from: Array -> Array", () => { + const observable = cache.watchFragment({ + fragment, + from: [{ __typename: "Item", id: 1 }, null], + }); + const result = observable.getCurrentResult(); + + expectTypeOf(observable).toEqualTypeOf< + ApolloCache.ObservableFragment> + >(); + expectTypeOf(result).toEqualTypeOf< + | { + data: Array; + dataState: "complete"; + complete: true; + missing?: never; + } + | { + data: Array | null>; + dataState: "partial"; + complete: false; + missing?: MissingTree; + } + >(); + }); +}); diff --git a/src/cache/core/cache.ts b/src/cache/core/cache.ts index 3e98626633d..d39ed6f40d0 100644 --- a/src/cache/core/cache.ts +++ b/src/cache/core/cache.ts @@ -1,23 +1,40 @@ import { WeakCache } from "@wry/caches"; +import { equal } from "@wry/equality"; +import { Trie } from "@wry/trie"; 
import type { DocumentNode, FragmentDefinitionNode, InlineFragmentNode, } from "graphql"; import { wrap } from "optimism"; -import { Observable } from "rxjs"; +import { + distinctUntilChanged, + map, + Observable, + ReplaySubject, + share, + shareReplay, + tap, + timer, +} from "rxjs"; import type { + DataValue, GetDataState, OperationVariables, TypedDocumentNode, } from "@apollo/client"; import type { FragmentType, Unmasked } from "@apollo/client/masking"; import type { Reference, StoreObject } from "@apollo/client/utilities"; -import { cacheSizes } from "@apollo/client/utilities"; +import { cacheSizes, canonicalStringify } from "@apollo/client/utilities"; import { __DEV__ } from "@apollo/client/utilities/environment"; -import type { NoInfer } from "@apollo/client/utilities/internal"; +import type { + IsAny, + NoInfer, + Prettify, +} from "@apollo/client/utilities/internal"; import { + combineLatestBatched, equalByQuery, getApolloCacheMemoryInternals, getFragmentDefinition, @@ -33,6 +50,15 @@ import type { MissingTree } from "./types/common.js"; export type Transaction = (c: ApolloCache) => void; export declare namespace ApolloCache { + /** + * Acceptable values provided to the `from` option for `watchFragment`. + */ + export type WatchFragmentFromValue = + | StoreObject + | Reference + | FragmentType> + | string + | null; /** * Watched fragment options. */ @@ -55,7 +81,9 @@ export declare namespace ApolloCache { * * @docGroup 1. Required options */ - from: StoreObject | Reference | FragmentType> | string; + from: + | ApolloCache.WatchFragmentFromValue + | Array>; /** * Any variables that the GraphQL fragment may depend on. * @@ -85,14 +113,46 @@ export declare namespace ApolloCache { * Watched fragment results. */ export type WatchFragmentResult = - | ({ - complete: true; - missing?: never; - } & GetDataState) - | ({ - complete: false; - missing: MissingTree; - } & GetDataState); + true extends IsAny ? + | ({ + complete: true; + missing?: never; + } & GetDataState) + | ({ + complete: false; + missing?: MissingTree; + } & GetDataState) + : TData extends null | null[] ? + Prettify< + { + complete: true; + missing?: never; + } & GetDataState + > + : | Prettify< + { + complete: true; + missing?: never; + } & GetDataState + > + | { + complete: false; + missing?: MissingTree; + /** {@inheritDoc @apollo/client!QueryResultDocumentation#data:member} */ + data: TData extends Array ? + Array | null> + : DataValue.Partial; + /** {@inheritDoc @apollo/client!QueryResultDocumentation#dataState:member} */ + dataState: "partial"; + }; + + export interface ObservableFragment + extends Observable> { + /** + * Return the current result for the fragment. 
+ */ + getCurrentResult: () => ApolloCache.WatchFragmentResult; + } } export abstract class ApolloCache { @@ -309,95 +369,287 @@ export abstract class ApolloCache { }); } + private fragmentWatches = new Trie<{ + observable?: Observable & { dirty: boolean }; + }>(true); + + public watchFragment< + TData = unknown, + TVariables extends OperationVariables = OperationVariables, + >( + options: ApolloCache.WatchFragmentOptions & { + from: Array>>; + } + ): ApolloCache.ObservableFragment>>; + + public watchFragment< + TData = unknown, + TVariables extends OperationVariables = OperationVariables, + >( + options: ApolloCache.WatchFragmentOptions & { + from: Array; + } + ): ApolloCache.ObservableFragment>; + + public watchFragment< + TData = unknown, + TVariables extends OperationVariables = OperationVariables, + >( + options: ApolloCache.WatchFragmentOptions & { + from: Array>; + } + ): ApolloCache.ObservableFragment | null>>; + + public watchFragment< + TData = unknown, + TVariables extends OperationVariables = OperationVariables, + >( + options: ApolloCache.WatchFragmentOptions & { + from: null; + } + ): ApolloCache.ObservableFragment; + + public watchFragment< + TData = unknown, + TVariables extends OperationVariables = OperationVariables, + >( + options: ApolloCache.WatchFragmentOptions & { + from: NonNullable>; + } + ): ApolloCache.ObservableFragment>; + + public watchFragment< + TData = unknown, + TVariables extends OperationVariables = OperationVariables, + >( + options: ApolloCache.WatchFragmentOptions + ): ApolloCache.ObservableFragment | null>; + /** {@inheritDoc @apollo/client!ApolloClient#watchFragment:member(1)} */ public watchFragment< TData = unknown, TVariables extends OperationVariables = OperationVariables, >( options: ApolloCache.WatchFragmentOptions - ): Observable>> { + ): + | ApolloCache.ObservableFragment | null> + | ApolloCache.ObservableFragment | null>> { const { fragment, fragmentName, from, optimistic = true, - ...otherOptions + variables, } = options; - const query = this.getFragmentDoc(fragment, fragmentName); - // While our TypeScript types do not allow for `undefined` as a valid - // `from`, its possible `useFragment` gives us an `undefined` since it - // calls` cache.identify` and provides that value to `from`. We are - // adding this fix here however to ensure those using plain JavaScript - // and using `cache.identify` themselves will avoid seeing the obscure - // warning. - const id = - typeof from === "undefined" || typeof from === "string" ? - from - : this.identify(from); - - if (__DEV__) { - const actualFragmentName = - fragmentName || getFragmentDefinition(fragment).name.value; - - if (!id) { - invariant.warn( - "Could not identify object passed to `from` for '%s' fragment, either because the object is non-normalized or the key fields are missing. If you are masking this object, please ensure the key fields are requested by the parent object.", - actualFragmentName + const query = this.getFragmentDoc( + fragment, + fragmentName + ) as TypedDocumentNode; + + const fromArray = Array.isArray(from) ? from : [from]; + + const ids = fromArray.map((value) => { + // While our TypeScript types do not allow for `undefined` as a valid + // `from`, its possible `useFragment` gives us an `undefined` since it + // calls` cache.identify` and provides that value to `from`. We are + // adding this fix here however to ensure those using plain JavaScript + // and using `cache.identify` themselves will avoid seeing the obscure + // warning. 
+ const id = + ( + typeof value === "undefined" || + typeof value === "string" || + value === null + ) ? + value + : this.identify(value); + + if (__DEV__) { + const actualFragmentName = + fragmentName || getFragmentDefinition(fragment).name.value; + + if (id === undefined) { + invariant.warn( + "Could not identify object passed to `from` for '%s' fragment, either because the object is non-normalized or the key fields are missing. If you are masking this object, please ensure the key fields are requested by the parent object.", + actualFragmentName + ); + } + } + + return id as string | null; + }); + + let currentResult: ApolloCache.WatchFragmentResult; + function toResult( + diffs: Array | null>> + ): ApolloCache.WatchFragmentResult { + let result: ApolloCache.WatchFragmentResult; + if (Array.isArray(from)) { + result = diffs.reduce( + (result, diff, idx) => { + result.data.push(diff.result as any); + result.complete &&= diff.complete; + result.dataState = result.complete ? "complete" : "partial"; + + if (diff.missing) { + result.missing ||= {}; + (result.missing as any)[idx] = diff.missing.missing; + } + + return result; + }, + { + data: [], + dataState: "complete", + complete: true, + } as ApolloCache.WatchFragmentResult ); + } else { + const [diff] = diffs; + result = { + // Unfortunately we forgot to allow for `null` on watchFragment in 4.0 + // when `from` is a single record. As such, we need to fallback to {} + // when diff.result is null to maintain backwards compatibility. We + // should plan to change this in v5. We do howeever support `null` if + // `from` is explicitly `null`. + // + // NOTE: Using `from` with an array will maintain `null` properly + // without the need for a similar fallback since watchFragment with + // arrays is new functionality in v4. + data: from === null ? diff.result : diff.result ?? {}, + complete: diff.complete, + dataState: diff.complete ? "complete" : "partial", + } as ApolloCache.WatchFragmentResult>; + + if (diff.missing) { + result.missing = diff.missing.missing; + } } - } - const diffOptions: Cache.DiffOptions = { - ...otherOptions, - returnPartialData: true, - id, - query, - optimistic, - }; + if (!equal(currentResult, result)) { + currentResult = result; + } + + return currentResult; + } - let latestDiff: Cache.DiffResult | undefined; + let subscribed = false; + const observable = + ids.length === 0 ? 
+ emptyArrayObservable + : combineLatestBatched( + ids.map((id) => this.watchSingleFragment(id, query, options)) + ).pipe( + map(toResult), + tap({ + subscribe: () => (subscribed = true), + unsubscribe: () => (subscribed = false), + }), + shareReplay({ bufferSize: 1, refCount: true }) + ); - return new Observable((observer) => { - return this.watch({ - ...diffOptions, - immediate: true, - callback: (diff) => { - let data = diff.result; + return Object.assign(observable, { + getCurrentResult: () => { + if (subscribed && currentResult) { + return currentResult as any; + } - // TODO: Remove this once `watchFragment` supports `null` as valid - // value emitted - if (data === null) { - data = {} as any; - } + const diffs = ids.map( + (id): Cache.DiffResult | null> => { + if (id === null) { + return { result: null, complete: true }; + } - if ( - // Always ensure we deliver the first result - latestDiff && - equalByQuery( + return this.diff>({ + id, query, - { data: latestDiff.result }, - { data }, - options.variables - ) - ) { - return; + returnPartialData: true, + optimistic, + variables, + }); } + ); - const result = { - data, - dataState: diff.complete ? "complete" : "partial", - complete: !!diff.complete, - } as ApolloCache.WatchFragmentResult>; + return toResult(diffs); + }, + } satisfies Pick< + | ApolloCache.ObservableFragment | null> + | ApolloCache.ObservableFragment | null>>, + "getCurrentResult" + >) as any; + } - if (diff.missing) { - result.missing = diff.missing.missing; - } + /** + * Can be overridden by subclasses to delay calling the provided callback + * until after all broadcasts have been completed - e.g. in a cache scenario + * where many watchers are notified in parallel. + */ + protected onAfterBroadcast = (cb: () => void) => cb(); + private watchSingleFragment< + TData = unknown, + TVariables extends OperationVariables = OperationVariables, + >( + id: string | null, + fragmentQuery: TypedDocumentNode, + options: Omit< + ApolloCache.WatchFragmentOptions, + "from" | "fragment" | "fragmentName" + > + ): Observable | null>> & { dirty: boolean } { + if (id === null) { + return nullObservable; + } - latestDiff = { ...diff, result: data } as Cache.DiffResult; - observer.next(result); - }, - }); - }); + const { optimistic = true, variables } = options; + + const cacheKey = [ + fragmentQuery, + canonicalStringify({ id, optimistic, variables }), + ]; + const cacheEntry = this.fragmentWatches.lookupArray(cacheKey); + + if (!cacheEntry.observable) { + const observable: Observable> & { + dirty?: boolean; + } = new Observable>((observer) => { + const cleanup = this.watch({ + variables, + returnPartialData: true, + id, + query: fragmentQuery, + optimistic, + immediate: true, + callback: (diff) => { + observable.dirty = true; + this.onAfterBroadcast(() => { + observer.next(diff); + observable.dirty = false; + }); + }, + }); + return () => { + cleanup(); + this.fragmentWatches.removeArray(cacheKey); + }; + }).pipe( + distinctUntilChanged((previous, current) => + equalByQuery( + fragmentQuery, + { data: previous.result }, + { data: current.result }, + options.variables + ) + ), + share({ + connector: () => new ReplaySubject(1), + // debounce so a synchronous unsubscribe+resubscribe doesn't tear down the watch and create a new one + resetOnRefCountZero: () => timer(0), + }) + ); + cacheEntry.observable = Object.assign(observable, { dirty: false }); + } + + return cacheEntry.observable; } // Make sure we compute the same (===) fragment query document every @@ -569,3 +821,20 @@ export 
abstract class ApolloCache { if (__DEV__) { ApolloCache.prototype.getMemoryInternals = getApolloCacheMemoryInternals; } + +const nullObservable = Object.assign( + new Observable>((observer) => { + observer.next({ result: null, complete: true }); + }), + { dirty: false } +); + +const emptyArrayObservable = new Observable< + ApolloCache.WatchFragmentResult +>((observer) => { + observer.next({ + data: [], + dataState: "complete", + complete: true, + }); +}); diff --git a/src/cache/inmemory/inMemoryCache.ts b/src/cache/inmemory/inMemoryCache.ts index 102ee0b2f51..60255baf691 100644 --- a/src/cache/inmemory/inMemoryCache.ts +++ b/src/cache/inmemory/inMemoryCache.ts @@ -536,7 +536,17 @@ export class InMemoryCache extends ApolloCache { protected broadcastWatches(options?: BroadcastOptions) { if (!this.txCount) { - this.watches.forEach((c) => this.maybeBroadcastWatch(c, options)); + const prevOnAfter = this.onAfterBroadcast; + const callbacks = new Set<() => void>(); + this.onAfterBroadcast = (cb: () => void) => { + callbacks.add(cb); + }; + try { + this.watches.forEach((c) => this.maybeBroadcastWatch(c, options)); + callbacks.forEach((cb) => cb()); + } finally { + this.onAfterBroadcast = prevOnAfter; + } } } diff --git a/src/core/ApolloClient.ts b/src/core/ApolloClient.ts index 0089eb2b32e..545af418a06 100644 --- a/src/core/ApolloClient.ts +++ b/src/core/ApolloClient.ts @@ -346,7 +346,15 @@ export declare namespace ApolloClient { > = ApolloCache.WatchFragmentOptions; export type WatchFragmentResult = - ApolloCache.WatchFragmentResult; + ApolloCache.WatchFragmentResult>; + + export interface ObservableFragment + extends Observable> { + /** + * Return the current result for the fragment. + */ + getCurrentResult: () => ApolloClient.WatchFragmentResult; + } /** * Watched query options. @@ -1114,42 +1122,114 @@ export class ApolloClient { * the cache to identify the fragment and optionally specify whether to react * to optimistic updates. 
*/ + public watchFragment< + TData = unknown, + TVariables extends OperationVariables = OperationVariables, + >( + options: ApolloClient.WatchFragmentOptions & { + from: Array>>; + } + ): ApolloClient.ObservableFragment>; + + /** {@inheritDoc @apollo/client!ApolloClient#watchFragment:member(1)} */ + public watchFragment< + TData = unknown, + TVariables extends OperationVariables = OperationVariables, + >( + options: ApolloClient.WatchFragmentOptions & { + from: Array; + } + ): ApolloClient.ObservableFragment>; + + public watchFragment< + TData = unknown, + TVariables extends OperationVariables = OperationVariables, + >( + options: ApolloClient.WatchFragmentOptions & { + from: Array>; + } + ): ApolloClient.ObservableFragment>; + + /** {@inheritDoc @apollo/client!ApolloClient#watchFragment:member(1)} */ + public watchFragment< + TData = unknown, + TVariables extends OperationVariables = OperationVariables, + >( + options: ApolloClient.WatchFragmentOptions & { + from: null; + } + ): ApolloClient.ObservableFragment; + /** {@inheritDoc @apollo/client!ApolloClient#watchFragment:member(1)} */ + public watchFragment< + TData = unknown, + TVariables extends OperationVariables = OperationVariables, + >( + options: ApolloClient.WatchFragmentOptions & { + from: NonNullable>; + } + ): ApolloClient.ObservableFragment; + + /** {@inheritDoc @apollo/client!ApolloClient#watchFragment:member(1)} */ public watchFragment< TData = unknown, TVariables extends OperationVariables = OperationVariables, >( options: ApolloClient.WatchFragmentOptions - ): Observable>> { + ): ApolloClient.ObservableFragment; + + public watchFragment< + TData = unknown, + TVariables extends OperationVariables = OperationVariables, + >( + options: ApolloClient.WatchFragmentOptions + ): + | ApolloClient.ObservableFragment + | ApolloClient.ObservableFragment> { const dataMasking = this.queryManager.dataMasking; + const observable = this.cache.watchFragment({ + ...options, + fragment: this.transform(options.fragment, dataMasking), + }); - return this.cache - .watchFragment({ - ...options, - fragment: this.transform(options.fragment, dataMasking), - }) - .pipe( - map((result) => { - // The transform will remove fragment spreads from the fragment - // document when dataMasking is enabled. The `maskFragment` function - // remains to apply warnings to fragments marked as - // `@unmask(mode: "migrate")`. Since these warnings are only applied - // in dev, we can skip the masking algorithm entirely for production. - if (__DEV__) { - if (dataMasking) { - const data = this.queryManager.maskFragment({ - ...options, - data: result.data, - }); - return { ...result, data } as ApolloClient.WatchFragmentResult< - MaybeMasked - >; - } - } + const mask = ( + result: ApolloClient.WatchFragmentResult + ): ApolloClient.WatchFragmentResult => { + // The transform will remove fragment spreads from the fragment + // document when dataMasking is enabled. The `mask` function + // remains to apply warnings to fragments marked as + // `@unmask(mode: "migrate")`. Since these warnings are only applied + // in dev, we can skip the masking algorithm entirely for production. 
+ if (__DEV__) { + if (dataMasking) { + return { + ...result, + data: this.queryManager.maskFragment({ + ...options, + data: result.data, + }), + } as ApolloClient.WatchFragmentResult>; + } + } - return result as ApolloClient.WatchFragmentResult>; - }) - ); + return result; + }; + + let currentResult: ApolloClient.WatchFragmentResult; + let stableMaskedResult: ApolloClient.WatchFragmentResult; + + return Object.assign(observable.pipe(map(mask)), { + getCurrentResult: () => { + const result = observable.getCurrentResult(); + + if (result !== currentResult) { + currentResult = result as any; + stableMaskedResult = mask(currentResult); + } + + return stableMaskedResult; + }, + }) as ApolloClient.ObservableFragment; } /** diff --git a/src/core/__tests__/client.watchFragment/arrays.test.ts b/src/core/__tests__/client.watchFragment/arrays.test.ts new file mode 100644 index 00000000000..dec386f1885 --- /dev/null +++ b/src/core/__tests__/client.watchFragment/arrays.test.ts @@ -0,0 +1,1030 @@ +import type { TypedDocumentNode } from "@apollo/client"; +import { ApolloClient, ApolloLink, gql, InMemoryCache } from "@apollo/client"; +import { ObservableStream, wait } from "@apollo/client/testing/internal"; + +test("can use array for `from` to get array of items", async () => { + type Item = { + __typename: string; + id: number; + text?: string; + }; + + const fragment: TypedDocumentNode = gql` + fragment ItemFragment on Item { + id + text + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: ApolloLink.empty(), + }); + + for (let i = 1; i <= 5; i++) { + client.writeFragment({ + fragment, + data: { __typename: "Item", id: i, text: `Item #${i}` }, + }); + } + + const observable = client.watchFragment({ + fragment, + from: [ + { __typename: "Item", id: 1 }, + { __typename: "Item", id: 2 }, + { __typename: "Item", id: 5 }, + ], + }); + const stream = new ObservableStream(observable); + + await expect(stream).toEmitTypedValue({ + data: [ + { __typename: "Item", id: 1, text: "Item #1" }, + { __typename: "Item", id: 2, text: "Item #2" }, + { __typename: "Item", id: 5, text: "Item #5" }, + ], + dataState: "complete", + complete: true, + }); + + await expect(stream).not.toEmitAnything(); +}); + +test("allows mix of array identifiers", async () => { + type Item = { + __typename: string; + id: number; + text?: string; + }; + + const fragment: TypedDocumentNode = gql` + fragment ItemFragment on Item { + id + text + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: ApolloLink.empty(), + }); + + for (let i = 1; i <= 5; i++) { + client.writeFragment({ + fragment, + data: { __typename: "Item", id: i, text: `Item #${i}` }, + }); + } + + const observable = client.watchFragment({ + fragment, + from: [{ __typename: "Item", id: 1 }, "Item:2", { __ref: "Item:3" }], + }); + const stream = new ObservableStream(observable); + + await expect(stream).toEmitTypedValue({ + data: [ + { __typename: "Item", id: 1, text: "Item #1" }, + { __typename: "Item", id: 2, text: "Item #2" }, + { __typename: "Item", id: 3, text: "Item #3" }, + ], + dataState: "complete", + complete: true, + }); + + await expect(stream).not.toEmitAnything(); +}); + +test("returns empty array with empty from", async () => { + type Item = { + __typename: string; + id: number; + text?: string; + }; + + const fragment: TypedDocumentNode = gql` + fragment ItemFragment on Item { + id + text + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: ApolloLink.empty(), + }); + + 
const observable = client.watchFragment({ fragment, from: [] }); + const stream = new ObservableStream(observable); + + await expect(stream).toEmitTypedValue({ + data: [], + dataState: "complete", + complete: true, + }); + await expect(stream).not.toEmitAnything(); +}); + +test("returns result as partial when cache is empty", async () => { + type Item = { + __typename: string; + id: number; + text?: string; + }; + + const fragment: TypedDocumentNode = gql` + fragment ItemFragment on Item { + id + text + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: ApolloLink.empty(), + }); + + const observable = client.watchFragment({ + fragment, + from: [ + { __typename: "Item", id: 1 }, + { __typename: "Item", id: 2 }, + { __typename: "Item", id: 5 }, + ], + }); + const stream = new ObservableStream(observable); + + await expect(stream).toEmitTypedValue({ + data: [null, null, null], + dataState: "partial", + complete: false, + missing: { + 0: "Dangling reference to missing Item:1 object", + 1: "Dangling reference to missing Item:2 object", + 2: "Dangling reference to missing Item:5 object", + }, + }); + + await expect(stream).not.toEmitAnything(); +}); + +test("returns as complete if all `from` items are null", async () => { + type Item = { + __typename: string; + id: number; + text?: string; + }; + + const fragment: TypedDocumentNode = gql` + fragment ItemFragment on Item { + id + text + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: ApolloLink.empty(), + }); + + const observable = client.watchFragment({ + fragment, + from: [null, null, null], + }); + const stream = new ObservableStream(observable); + + await expect(stream).toEmitTypedValue({ + data: [null, null, null], + dataState: "complete", + complete: true, + }); + + await expect(stream).not.toEmitAnything(); +}); + +test("returns as complete if all `from` items are complete or null", async () => { + type Item = { + __typename: string; + id: number; + text?: string; + }; + + const fragment: TypedDocumentNode = gql` + fragment ItemFragment on Item { + id + text + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: ApolloLink.empty(), + }); + + client.writeFragment({ + fragment, + data: { __typename: "Item", id: 5, text: "Item #5" }, + }); + + const observable = client.watchFragment({ + fragment, + from: [null, null, { __typename: "Item", id: 5 }], + }); + const stream = new ObservableStream(observable); + + await expect(stream).toEmitTypedValue({ + data: [null, null, { __typename: "Item", id: 5, text: "Item #5" }], + dataState: "complete", + complete: true, + }); + + await expect(stream).not.toEmitAnything(); +}); + +test("returns as partial if some `from` items are incomplete mixed with null", async () => { + type Item = { + __typename: string; + id: number; + text?: string; + }; + + const fragment: TypedDocumentNode = gql` + fragment ItemFragment on Item { + id + text + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: ApolloLink.empty(), + }); + + const observable = client.watchFragment({ + fragment, + from: [null, null, { __typename: "Item", id: 5 }], + }); + const stream = new ObservableStream(observable); + + await expect(stream).toEmitTypedValue({ + data: [null, null, null], + dataState: "partial", + complete: false, + missing: { + 2: "Dangling reference to missing Item:5 object", + }, + }); + + await expect(stream).not.toEmitAnything(); +}); + +test("can use static arrays with useFragment with partially 
fulfilled items", async () => { + type Item = { + __typename: string; + id: number; + text?: string; + }; + + const fragment: TypedDocumentNode = gql` + fragment ItemFragment on Item { + id + text + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: ApolloLink.empty(), + }); + + for (let i = 1; i <= 2; i++) { + client.writeFragment({ + fragment, + data: { __typename: "Item", id: i, text: `Item #${i}` }, + }); + } + + const observable = client.watchFragment({ + fragment, + from: [ + { __typename: "Item", id: 1 }, + { __typename: "Item", id: 2 }, + { __typename: "Item", id: 5 }, + ], + }); + const stream = new ObservableStream(observable); + + await expect(stream).toEmitTypedValue({ + data: [ + { __typename: "Item", id: 1, text: "Item #1" }, + { __typename: "Item", id: 2, text: "Item #2" }, + null, + ], + dataState: "partial", + complete: false, + missing: { 2: "Dangling reference to missing Item:5 object" }, + }); + + await expect(stream).not.toEmitAnything(); +}); + +test("updates items in the array with cache writes", async () => { + type Item = { + __typename: string; + id: number; + text?: string; + }; + + const fragment: TypedDocumentNode = gql` + fragment ItemFragment on Item { + id + text + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: ApolloLink.empty(), + }); + const { cache } = client; + + for (let i = 1; i <= 2; i++) { + client.writeFragment({ + fragment, + data: { __typename: "Item", id: i, text: `Item #${i}` }, + }); + } + + const observable = client.watchFragment({ + fragment, + from: [ + { __typename: "Item", id: 1 }, + { __typename: "Item", id: 2 }, + { __typename: "Item", id: 5 }, + ], + }); + const stream = new ObservableStream(observable); + + await expect(stream).toEmitTypedValue({ + data: [ + { __typename: "Item", id: 1, text: "Item #1" }, + { __typename: "Item", id: 2, text: "Item #2" }, + null, + ], + dataState: "partial", + complete: false, + missing: { + 2: "Dangling reference to missing Item:5 object", + }, + }); + + client.writeFragment({ + fragment, + data: { + __typename: "Item", + id: 2, + text: "Item #2 updated", + }, + }); + + await expect(stream).toEmitTypedValue({ + data: [ + { __typename: "Item", id: 1, text: "Item #1" }, + { __typename: "Item", id: 2, text: "Item #2 updated" }, + null, + ], + dataState: "partial", + complete: false, + missing: { + 2: "Dangling reference to missing Item:5 object", + }, + }); + + client.cache.batch({ + update: (cache) => { + cache.writeFragment({ + fragment, + data: { + __typename: "Item", + id: 1, + text: "Item #1 from batch", + }, + }); + + cache.writeFragment({ + fragment, + data: { + __typename: "Item", + id: 5, + text: "Item #5 from batch", + }, + }); + }, + }); + + await expect(stream).toEmitTypedValue({ + data: [ + { __typename: "Item", id: 1, text: "Item #1 from batch" }, + { __typename: "Item", id: 2, text: "Item #2 updated" }, + { __typename: "Item", id: 5, text: "Item #5 from batch" }, + ], + dataState: "complete", + complete: true, + }); + + cache.modify({ + id: cache.identify({ __typename: "Item", id: 1 }), + fields: { + text: (_, { DELETE }) => DELETE, + }, + }); + + await expect(stream).toEmitTypedValue({ + data: [ + { __typename: "Item", id: 1 }, + { __typename: "Item", id: 2, text: "Item #2 updated" }, + { __typename: "Item", id: 5, text: "Item #5 from batch" }, + ], + dataState: "partial", + complete: false, + missing: { + 0: { + text: "Can't find field 'text' on Item:1 object", + }, + }, + }); + + // should not cause rerender since its 
an item not watched + client.writeFragment({ + fragment, + data: { + __typename: "Item", + id: 6, + text: "Item #6 ignored", + }, + }); + + await expect(stream).not.toEmitAnything(); +}); + +test("works with data masking", async () => { + type ItemDetails = { + __typename: string; + text: string; + } & { " $fragmentName"?: "ItemDetailsFragment" }; + + type Item = { + __typename: string; + id: number; + } & { + " $fragmentRefs"?: { ItemDetailsFragment: ItemDetails }; + }; + + const detailsFragment: TypedDocumentNode = gql` + fragment ItemDetailsFragment on Item { + text + } + `; + + const fragment: TypedDocumentNode = gql` + fragment ItemFragment on Item { + id + ...ItemDetailsFragment + } + + ${detailsFragment} + `; + + const client = new ApolloClient({ + dataMasking: true, + cache: new InMemoryCache(), + link: ApolloLink.empty(), + }); + const { cache } = client; + + for (let i = 1; i <= 5; i++) { + client.writeFragment({ + fragment, + fragmentName: "ItemFragment", + data: { __typename: "Item", id: i, text: `Item #${i}` }, + }); + } + + const parentObservable = client.watchFragment({ + fragment, + fragmentName: "ItemFragment", + from: [ + { __typename: "Item", id: 1 }, + { __typename: "Item", id: 2 }, + { __typename: "Item", id: 5 }, + ], + }); + const childObservable = client.watchFragment({ + fragment: detailsFragment, + from: [ + { __typename: "Item", id: 1 }, + { __typename: "Item", id: 2 }, + { __typename: "Item", id: 5 }, + ], + }); + const parentStream = new ObservableStream(parentObservable); + const childStream = new ObservableStream(childObservable); + + await expect(parentStream).toEmitTypedValue({ + data: [ + { __typename: "Item", id: 1 }, + { __typename: "Item", id: 2 }, + { __typename: "Item", id: 5 }, + ], + dataState: "complete", + complete: true, + }); + await expect(childStream).toEmitTypedValue({ + data: [ + { __typename: "Item", text: "Item #1" }, + { __typename: "Item", text: "Item #2" }, + { __typename: "Item", text: "Item #5" }, + ], + dataState: "complete", + complete: true, + }); + + client.writeFragment({ + fragment, + fragmentName: "ItemFragment", + data: { + __typename: "Item", + id: 2, + text: "Item #2 updated", + }, + }); + + await expect(childStream).toEmitTypedValue({ + data: [ + { __typename: "Item", text: "Item #1" }, + { __typename: "Item", text: "Item #2 updated" }, + { __typename: "Item", text: "Item #5" }, + ], + dataState: "complete", + complete: true, + }); + await expect(parentStream).not.toEmitAnything(); + + client.cache.batch({ + update: (cache) => { + cache.writeFragment({ + fragment, + fragmentName: "ItemFragment", + data: { + __typename: "Item", + id: 1, + text: "Item #1 from batch", + }, + }); + + cache.writeFragment({ + fragment, + fragmentName: "ItemFragment", + data: { + __typename: "Item", + id: 5, + text: "Item #5 from batch", + }, + }); + }, + }); + + await expect(childStream).toEmitTypedValue({ + data: [ + { __typename: "Item", text: "Item #1 from batch" }, + { __typename: "Item", text: "Item #2 updated" }, + { __typename: "Item", text: "Item #5 from batch" }, + ], + dataState: "complete", + complete: true, + }); + + await expect(parentStream).not.toEmitAnything(); + + cache.modify({ + id: cache.identify({ __typename: "Item", id: 1 }), + fields: { + text: (_, { DELETE }) => DELETE, + }, + }); + + await expect(childStream).toEmitTypedValue({ + data: [ + { __typename: "Item" }, + { __typename: "Item", text: "Item #2 updated" }, + { __typename: "Item", text: "Item #5 from batch" }, + ], + dataState: "partial", + complete: false, + 
missing: { + 0: { + text: "Can't find field 'text' on Item:1 object", + }, + }, + }); + await expect(parentStream).not.toEmitAnything(); + + // should not cause rerender since its an item not watched + client.writeFragment({ + fragment, + fragmentName: "ItemFragment", + data: { + __typename: "Item", + id: 6, + text: "Item #6 ignored", + }, + }); + + await expect(parentStream).not.toEmitAnything(); + await expect(childStream).not.toEmitAnything(); +}); + +test("can subscribe to the same object multiple times", async () => { + type Item = { + __typename: string; + id: number; + text?: string; + }; + + const fragment: TypedDocumentNode = gql` + fragment ItemFragment on Item { + id + text + } + `; + + const cache = new InMemoryCache(); + const client = new ApolloClient({ + cache, + link: ApolloLink.empty(), + }); + + client.writeFragment({ + fragment, + data: { __typename: "Item", id: 1, text: "Item #1" }, + }); + client.writeFragment({ + fragment, + data: { __typename: "Item", id: 2, text: "Item #2" }, + }); + + const stream1 = new ObservableStream( + client.watchFragment({ + fragment, + from: [ + { __typename: "Item", id: 1 }, + { __typename: "Item", id: 1 }, + ], + }) + ); + // ensure we only watch the item once + expect(cache).toHaveNumWatches(1); + + await expect(stream1).toEmitTypedValue({ + data: [ + { __typename: "Item", id: 1, text: "Item #1" }, + { __typename: "Item", id: 1, text: "Item #1" }, + ], + dataState: "complete", + complete: true, + }); + + client.writeFragment({ + fragment, + data: { __typename: "Item", id: 1, text: `Item #1 updated` }, + }); + + await expect(stream1).toEmitTypedValue({ + data: [ + { __typename: "Item", id: 1, text: "Item #1 updated" }, + { __typename: "Item", id: 1, text: "Item #1 updated" }, + ], + dataState: "complete", + complete: true, + }); + + const stream2 = new ObservableStream( + client.watchFragment({ + fragment, + from: [ + { __typename: "Item", id: 1 }, + { __typename: "Item", id: 1 }, + { __typename: "Item", id: 1 }, + ], + }) + ); + expect(cache).toHaveNumWatches(1); + + await expect(stream2).toEmitTypedValue({ + data: [ + { __typename: "Item", id: 1, text: "Item #1 updated" }, + { __typename: "Item", id: 1, text: "Item #1 updated" }, + { __typename: "Item", id: 1, text: "Item #1 updated" }, + ], + dataState: "complete", + complete: true, + }); + + const stream3 = new ObservableStream( + client.watchFragment({ + fragment, + from: [ + { __typename: "Item", id: 1 }, + { __typename: "Item", id: 2 }, + { __typename: "Item", id: 1 }, + ], + }) + ); + expect(cache).toHaveNumWatches(2); + + await expect(stream3).toEmitTypedValue({ + data: [ + { __typename: "Item", id: 1, text: "Item #1 updated" }, + { __typename: "Item", id: 2, text: "Item #2" }, + { __typename: "Item", id: 1, text: "Item #1 updated" }, + ], + dataState: "complete", + complete: true, + }); + + client.writeFragment({ + fragment, + data: { __typename: "Item", id: 1, text: `Item #1 updated again` }, + }); + + await expect(stream3).toEmitTypedValue({ + data: [ + { __typename: "Item", id: 1, text: "Item #1 updated again" }, + { __typename: "Item", id: 2, text: "Item #2" }, + { __typename: "Item", id: 1, text: "Item #1 updated again" }, + ], + dataState: "complete", + complete: true, + }); + await expect(stream2).toEmitTypedValue({ + data: [ + { __typename: "Item", id: 1, text: "Item #1 updated again" }, + { __typename: "Item", id: 1, text: "Item #1 updated again" }, + { __typename: "Item", id: 1, text: "Item #1 updated again" }, + ], + dataState: "complete", + complete: true, + }); + 
await expect(stream1).toEmitTypedValue({ + data: [ + { __typename: "Item", id: 1, text: "Item #1 updated again" }, + { __typename: "Item", id: 1, text: "Item #1 updated again" }, + ], + dataState: "complete", + complete: true, + }); + + await Promise.all([ + expect(stream1).not.toEmitAnything(), + expect(stream2).not.toEmitAnything(), + expect(stream3).not.toEmitAnything(), + ]); + + expect(client).toHaveFragmentWatchesOn(fragment, [ + { id: "Item:1", optimistic: true }, + { id: "Item:2", optimistic: true }, + ]); + + stream3.unsubscribe(); + await wait(2); + + expect(cache).toHaveNumWatches(1); + expect(client).toHaveFragmentWatchesOn(fragment, [ + { id: "Item:1", optimistic: true }, + ]); + + stream1.unsubscribe(); + await wait(2); + + expect(cache).toHaveNumWatches(1); + expect(client).toHaveFragmentWatchesOn(fragment, [ + { id: "Item:1", optimistic: true }, + ]); + + stream2.unsubscribe(); + await wait(2); + + expect(cache).toHaveNumWatches(0); +}); + +test("differentiates watches between optimistic and variables", async () => { + type Item = { + __typename: string; + id: number; + text?: string; + }; + + const fragment: TypedDocumentNode = gql` + fragment ItemFragment on Item { + id + text(casing: $casing) + } + `; + + const cache = new InMemoryCache(); + const client = new ApolloClient({ + cache, + link: ApolloLink.empty(), + }); + + client.writeFragment({ + fragment, + data: { __typename: "Item", id: 1, text: "ITEM #1" }, + variables: { casing: "UPPER" }, + }); + client.writeFragment({ + fragment, + data: { __typename: "Item", id: 1, text: "item #1" }, + variables: { casing: "LOWER" }, + }); + client.writeFragment({ + fragment, + data: { __typename: "Item", id: 2, text: "item #2" }, + variables: { casing: "LOWER" }, + }); + + const stream1 = new ObservableStream( + client.watchFragment({ + fragment, + from: [ + { __typename: "Item", id: 1 }, + { __typename: "Item", id: 1 }, + ], + variables: { casing: "UPPER" }, + }) + ); + // ensure we only watch the item once + expect(cache).toHaveNumWatches(1); + + await expect(stream1).toEmitTypedValue({ + data: [ + { __typename: "Item", id: 1, text: "ITEM #1" }, + { __typename: "Item", id: 1, text: "ITEM #1" }, + ], + dataState: "complete", + complete: true, + }); + + client.writeFragment({ + fragment, + data: { __typename: "Item", id: 1, text: "ITEM #1 UPDATED" }, + variables: { casing: "UPPER" }, + }); + + await expect(stream1).toEmitTypedValue({ + data: [ + { __typename: "Item", id: 1, text: "ITEM #1 UPDATED" }, + { __typename: "Item", id: 1, text: "ITEM #1 UPDATED" }, + ], + dataState: "complete", + complete: true, + }); + + const stream2 = new ObservableStream( + client.watchFragment({ + fragment, + from: [ + { __typename: "Item", id: 1 }, + { __typename: "Item", id: 1 }, + { __typename: "Item", id: 1 }, + ], + variables: { casing: "LOWER" }, + }) + ); + expect(cache).toHaveNumWatches(2); + + await expect(stream2).toEmitTypedValue({ + data: [ + { __typename: "Item", id: 1, text: "item #1" }, + { __typename: "Item", id: 1, text: "item #1" }, + { __typename: "Item", id: 1, text: "item #1" }, + ], + dataState: "complete", + complete: true, + }); + + client.writeFragment({ + fragment, + data: { __typename: "Item", id: 1, text: "item #1 updated" }, + variables: { casing: "LOWER" }, + }); + + await expect(stream2).toEmitTypedValue({ + data: [ + { __typename: "Item", id: 1, text: "item #1 updated" }, + { __typename: "Item", id: 1, text: "item #1 updated" }, + { __typename: "Item", id: 1, text: "item #1 updated" }, + ], + dataState: "complete", + 
complete: true, + }); + await expect(stream1).not.toEmitAnything(); + + const stream3 = new ObservableStream( + client.watchFragment({ + fragment, + from: [ + { __typename: "Item", id: 1 }, + { __typename: "Item", id: 2 }, + { __typename: "Item", id: 1 }, + ], + variables: { casing: "LOWER" }, + optimistic: false, + }) + ); + expect(cache).toHaveNumWatches(4); + + await expect(stream3).toEmitTypedValue({ + data: [ + { __typename: "Item", id: 1, text: "item #1 updated" }, + { __typename: "Item", id: 2, text: "item #2" }, + { __typename: "Item", id: 1, text: "item #1 updated" }, + ], + dataState: "complete", + complete: true, + }); + + client.writeFragment({ + fragment, + data: { __typename: "Item", id: 1, text: "item #1 updated again" }, + variables: { casing: "LOWER" }, + }); + + await expect(stream3).toEmitTypedValue({ + data: [ + { __typename: "Item", id: 1, text: "item #1 updated again" }, + { __typename: "Item", id: 2, text: "item #2" }, + { __typename: "Item", id: 1, text: "item #1 updated again" }, + ], + dataState: "complete", + complete: true, + }); + await expect(stream2).toEmitTypedValue({ + data: [ + { __typename: "Item", id: 1, text: "item #1 updated again" }, + { __typename: "Item", id: 1, text: "item #1 updated again" }, + { __typename: "Item", id: 1, text: "item #1 updated again" }, + ], + dataState: "complete", + complete: true, + }); + await expect(stream1).not.toEmitAnything(); + + client.writeFragment({ + fragment, + data: { __typename: "Item", id: 1, text: "FULL REPLACEMENT" }, + variables: { casing: "UPPER" }, + }); + + await expect(stream1).toEmitTypedValue({ + data: [ + { __typename: "Item", id: 1, text: "FULL REPLACEMENT" }, + { __typename: "Item", id: 1, text: "FULL REPLACEMENT" }, + ], + dataState: "complete", + complete: true, + }); + await expect(stream2).not.toEmitAnything(); + await expect(stream3).not.toEmitAnything(); + + expect(cache).toHaveNumWatches(4); + expect(client).toHaveFragmentWatchesOn(fragment, [ + { id: "Item:1", optimistic: true, variables: { casing: "UPPER" } }, + { id: "Item:1", optimistic: true, variables: { casing: "LOWER" } }, + { id: "Item:1", optimistic: false, variables: { casing: "LOWER" } }, + { id: "Item:2", optimistic: false, variables: { casing: "LOWER" } }, + ]); + + stream3.unsubscribe(); + await wait(2); + + expect(cache).toHaveNumWatches(2); + expect(client).toHaveFragmentWatchesOn(fragment, [ + { id: "Item:1", optimistic: true, variables: { casing: "UPPER" } }, + { id: "Item:1", optimistic: true, variables: { casing: "LOWER" } }, + ]); + + stream1.unsubscribe(); + await wait(2); + + expect(cache).toHaveNumWatches(1); + expect(client).toHaveFragmentWatchesOn(fragment, [ + { id: "Item:1", optimistic: true, variables: { casing: "LOWER" } }, + ]); + + stream2.unsubscribe(); + await wait(2); + + expect(cache).toHaveNumWatches(0); +}); diff --git a/src/core/__tests__/client.watchFragment/general.test.ts b/src/core/__tests__/client.watchFragment/general.test.ts new file mode 100644 index 00000000000..33c17f91398 --- /dev/null +++ b/src/core/__tests__/client.watchFragment/general.test.ts @@ -0,0 +1,190 @@ +import type { TypedDocumentNode } from "@apollo/client"; +import { ApolloClient, ApolloLink, gql, InMemoryCache } from "@apollo/client"; +import { ObservableStream, wait } from "@apollo/client/testing/internal"; + +test("can subscribe multiple times to watchFragment", async () => { + type Item = { + __typename: string; + id: number; + text?: string; + }; + + const ItemFragment: TypedDocumentNode = gql` + fragment ItemFragment on 
Item { + id + text + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: ApolloLink.empty(), + }); + + client.writeFragment({ + fragment: ItemFragment, + data: { __typename: "Item", id: 1, text: "Item #1" }, + }); + + const observable = client.watchFragment({ + fragment: ItemFragment, + from: { __typename: "Item", id: 1 }, + }); + + using stream1 = new ObservableStream(observable); + using stream2 = new ObservableStream(observable); + + await expect(stream1).toEmitTypedValue({ + data: { __typename: "Item", id: 1, text: "Item #1" }, + dataState: "complete", + complete: true, + }); + + await expect(stream2).toEmitTypedValue({ + data: { __typename: "Item", id: 1, text: "Item #1" }, + dataState: "complete", + complete: true, + }); + + client.writeFragment({ + fragment: ItemFragment, + data: { __typename: "Item", id: 1, text: "Item #1 updated" }, + }); + + await expect(stream1).toEmitTypedValue({ + data: { __typename: "Item", id: 1, text: "Item #1 updated" }, + dataState: "complete", + complete: true, + }); + + await expect(stream2).toEmitTypedValue({ + data: { __typename: "Item", id: 1, text: "Item #1 updated" }, + dataState: "complete", + complete: true, + }); + + await expect(stream1).not.toEmitAnything(); + await expect(stream2).not.toEmitAnything(); +}); + +test("dedupes watches when subscribing multiple times", async () => { + type Item = { + __typename: string; + id: number; + text?: string; + }; + + const ItemFragment: TypedDocumentNode = gql` + fragment ItemFragment on Item { + id + text + } + `; + + const cache = new InMemoryCache(); + const client = new ApolloClient({ + cache, + link: ApolloLink.empty(), + }); + + client.writeFragment({ + fragment: ItemFragment, + data: { __typename: "Item", id: 1, text: "Item #1" }, + }); + + const observable = client.watchFragment({ + fragment: ItemFragment, + from: { __typename: "Item", id: 1 }, + }); + + expect(cache).toHaveNumWatches(0); + + const sub1 = observable.subscribe(() => {}); + const sub2 = observable.subscribe(() => {}); + expect(cache).toHaveNumWatches(1); + + const sub3 = observable.subscribe(() => {}); + expect(cache).toHaveNumWatches(1); + + [sub1, sub2, sub3].forEach((sub) => sub.unsubscribe()); + await wait(0); + expect(cache).toHaveNumWatches(0); + + const sub4 = observable.subscribe(() => {}); + expect(cache).toHaveNumWatches(1); + + sub4.unsubscribe(); + await wait(0); + expect(cache).toHaveNumWatches(0); +}); + +test("emits null with from: null", async () => { + type Item = { + __typename: string; + id: number; + text?: string; + }; + + const ItemFragment: TypedDocumentNode = gql` + fragment ItemFragment on Item { + id + text + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: ApolloLink.empty(), + }); + + const observable = client.watchFragment({ + fragment: ItemFragment, + from: null, + }); + + using stream = new ObservableStream(observable); + + await expect(stream).toEmitTypedValue({ + data: null, + dataState: "complete", + complete: true, + }); + + await expect(stream).not.toEmitAnything(); +}); + +test("emits empty object when data is not in the cache", async () => { + type Item = { + __typename: string; + id: number; + text?: string; + }; + + const ItemFragment: TypedDocumentNode = gql` + fragment ItemFragment on Item { + id + text + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: ApolloLink.empty(), + }); + + const observable = client.watchFragment({ + fragment: ItemFragment, + from: { __typename: "Item", id: 1 }, + }); 
+ + using stream = new ObservableStream(observable); + + await expect(stream).toEmitTypedValue({ + data: {}, + dataState: "partial", + complete: false, + missing: "Dangling reference to missing Item:1 object", + }); + + await expect(stream).not.toEmitAnything(); +}); diff --git a/src/core/__tests__/client.watchFragment/getCurrentResult.test.ts b/src/core/__tests__/client.watchFragment/getCurrentResult.test.ts new file mode 100644 index 00000000000..1bb96b4149f --- /dev/null +++ b/src/core/__tests__/client.watchFragment/getCurrentResult.test.ts @@ -0,0 +1,624 @@ +import type { TypedDocumentNode } from "@apollo/client"; +import { ApolloClient, ApolloLink, gql, InMemoryCache } from "@apollo/client"; +import { + ObservableStream, + spyOnConsole, +} from "@apollo/client/testing/internal"; + +interface Item { + __typename: "Item"; + id: number; + text: string; +} + +test("returns initial result before subscribing", async () => { + const fragment: TypedDocumentNode = gql` + fragment ItemFragment on Item { + id + text + } + `; + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: ApolloLink.empty(), + }); + + client.writeFragment({ + fragment, + data: { __typename: "Item", id: 1, text: "Item #1" }, + }); + + const observable = client.watchFragment({ + fragment, + from: { __typename: "Item", id: 1 }, + }); + + expect(observable.getCurrentResult()).toStrictEqualTyped({ + data: { __typename: "Item", id: 1, text: "Item #1" }, + dataState: "complete", + complete: true, + }); +}); + +test("returns initial emitted value after subscribing", async () => { + const fragment: TypedDocumentNode = gql` + fragment ItemFragment on Item { + id + text + } + `; + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: ApolloLink.empty(), + }); + const diffSpy = jest.spyOn(client.cache, "diff"); + + client.writeFragment({ + fragment, + data: { __typename: "Item", id: 1, text: "Item #1" }, + }); + + const observable = client.watchFragment({ + fragment, + from: { __typename: "Item", id: 1 }, + }); + const stream = new ObservableStream(observable); + + await expect(stream).toEmitTypedValue({ + data: { __typename: "Item", id: 1, text: "Item #1" }, + dataState: "complete", + complete: true, + }); + + diffSpy.mockClear(); + + expect(observable.getCurrentResult()).toStrictEqualTyped({ + data: { __typename: "Item", id: 1, text: "Item #1" }, + dataState: "complete", + complete: true, + }); + expect(diffSpy).not.toHaveBeenCalled(); +}); + +test("returns most recently emitted value", async () => { + const fragment: TypedDocumentNode = gql` + fragment ItemFragment on Item { + id + text + } + `; + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: ApolloLink.empty(), + }); + + client.writeFragment({ + fragment, + data: { __typename: "Item", id: 1, text: "Item #1" }, + }); + + const observable = client.watchFragment({ + fragment, + from: { __typename: "Item", id: 1 }, + }); + const stream = new ObservableStream(observable); + + await expect(stream).toEmitTypedValue({ + data: { __typename: "Item", id: 1, text: "Item #1" }, + dataState: "complete", + complete: true, + }); + + expect(observable.getCurrentResult()).toStrictEqualTyped({ + data: { __typename: "Item", id: 1, text: "Item #1" }, + dataState: "complete", + complete: true, + }); + + client.writeFragment({ + fragment, + data: { __typename: "Item", id: 1, text: "Item #1 updated" }, + }); + + await expect(stream).toEmitTypedValue({ + data: { __typename: "Item", id: 1, text: "Item #1 updated" }, + dataState: "complete", + 
complete: true, + }); + + expect(observable.getCurrentResult()).toStrictEqualTyped({ + data: { __typename: "Item", id: 1, text: "Item #1 updated" }, + dataState: "complete", + complete: true, + }); +}); + +test("returns updated value if changed before subscribing", async () => { + const fragment: TypedDocumentNode = gql` + fragment ItemFragment on Item { + id + text + } + `; + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: ApolloLink.empty(), + }); + + client.writeFragment({ + fragment, + data: { __typename: "Item", id: 1, text: "Item #1" }, + }); + + const observable = client.watchFragment({ + fragment, + from: { __typename: "Item", id: 1 }, + }); + + expect(observable.getCurrentResult()).toStrictEqualTyped({ + data: { __typename: "Item", id: 1, text: "Item #1" }, + dataState: "complete", + complete: true, + }); + + client.writeFragment({ + fragment, + data: { __typename: "Item", id: 1, text: "Item #1 updated" }, + }); + + expect(observable.getCurrentResult()).toStrictEqualTyped({ + data: { __typename: "Item", id: 1, text: "Item #1 updated" }, + dataState: "complete", + complete: true, + }); +}); + +test("returns referentially stable value", async () => { + const fragment: TypedDocumentNode = gql` + fragment ItemFragment on Item { + id + text + } + `; + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: ApolloLink.empty(), + }); + + client.writeFragment({ + fragment, + data: { __typename: "Item", id: 1, text: "Item #1" }, + }); + + const observable = client.watchFragment({ + fragment, + from: { __typename: "Item", id: 1 }, + }); + + const firstResult = observable.getCurrentResult(); + expect(firstResult).toStrictEqualTyped({ + data: { __typename: "Item", id: 1, text: "Item #1" }, + dataState: "complete", + complete: true, + }); + + expect(observable.getCurrentResult()).toBe(firstResult); + expect(observable.getCurrentResult()).toBe(firstResult); + expect(observable.getCurrentResult()).toBe(firstResult); + + const stream = new ObservableStream(observable); + const result = await stream.takeNext(); + + // Ensure subscribing to the observable and emitting the first value doesn't + // change the identity of the object + expect(result).toBe(firstResult); + expect(observable.getCurrentResult()).toBe(firstResult); + + client.writeFragment({ + fragment, + data: { __typename: "Item", id: 1, text: "Item #1 updated" }, + }); + + // ensure it changes identity when a new value is emitted + const result2 = await stream.takeNext(); + const secondResult = observable.getCurrentResult(); + + expect(secondResult).not.toBe(firstResult); + expect(secondResult).toBe(result2); + expect(observable.getCurrentResult()).toBe(secondResult); + expect(observable.getCurrentResult()).toBe(secondResult); + expect(observable.getCurrentResult()).toBe(secondResult); +}); + +test("returns partial result with no cache data", async () => { + const fragment: TypedDocumentNode = gql` + fragment ItemFragment on Item { + id + text + } + `; + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: ApolloLink.empty(), + }); + + const observable = client.watchFragment({ + fragment, + from: { __typename: "Item", id: 1 }, + }); + + expect(observable.getCurrentResult()).toStrictEqualTyped({ + data: {}, + dataState: "partial", + complete: false, + missing: "Dangling reference to missing Item:1 object", + }); +}); + +test("is lazy computed", async () => { + const fragment: TypedDocumentNode = gql` + fragment ItemFragment on Item { + id + text + } + `; + const cache = new 
InMemoryCache(); + const client = new ApolloClient({ + cache, + link: ApolloLink.empty(), + }); + + jest.spyOn(cache, "diff"); + + const observable = client.watchFragment({ + fragment, + from: { __typename: "Item", id: 1 }, + }); + + expect(cache.diff).not.toHaveBeenCalled(); + observable.getCurrentResult(); + expect(cache.diff).toHaveBeenCalledTimes(1); +}); + +test("handles arrays", async () => { + const fragment: TypedDocumentNode = gql` + fragment ItemFragment on Item { + id + text + } + `; + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: ApolloLink.empty(), + }); + + for (let i = 1; i <= 5; i++) { + client.writeFragment({ + fragment, + data: { __typename: "Item", id: i, text: `Item #${i}` }, + }); + } + + const observable = client.watchFragment({ + fragment, + from: [ + { __typename: "Item", id: 1 }, + { __typename: "Item", id: 2 }, + { __typename: "Item", id: 5 }, + ], + }); + + expect(observable.getCurrentResult()).toStrictEqualTyped({ + data: [ + { __typename: "Item", id: 1, text: "Item #1" }, + { __typename: "Item", id: 2, text: "Item #2" }, + { __typename: "Item", id: 5, text: "Item #5" }, + ], + dataState: "complete", + complete: true, + }); + + client.writeFragment({ + fragment, + data: { __typename: "Item", id: 2, text: "Item #2 updated" }, + }); + + expect(observable.getCurrentResult()).toStrictEqualTyped({ + data: [ + { __typename: "Item", id: 1, text: "Item #1" }, + { __typename: "Item", id: 2, text: "Item #2 updated" }, + { __typename: "Item", id: 5, text: "Item #5" }, + ], + dataState: "complete", + complete: true, + }); +}); + +test("handles arrays with an active subscription", async () => { + const fragment: TypedDocumentNode = gql` + fragment ItemFragment on Item { + id + text + } + `; + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: ApolloLink.empty(), + }); + + for (let i = 1; i <= 5; i++) { + client.writeFragment({ + fragment, + data: { __typename: "Item", id: i, text: `Item #${i}` }, + }); + } + + const observable = client.watchFragment({ + fragment, + from: [ + { __typename: "Item", id: 1 }, + { __typename: "Item", id: 2 }, + { __typename: "Item", id: 5 }, + ], + }); + observable.subscribe(); + + expect(observable.getCurrentResult()).toStrictEqualTyped({ + data: [ + { __typename: "Item", id: 1, text: "Item #1" }, + { __typename: "Item", id: 2, text: "Item #2" }, + { __typename: "Item", id: 5, text: "Item #5" }, + ], + dataState: "complete", + complete: true, + }); + + client.writeFragment({ + fragment, + data: { __typename: "Item", id: 2, text: "Item #2 updated" }, + }); + + expect(observable.getCurrentResult()).toStrictEqualTyped({ + data: [ + { __typename: "Item", id: 1, text: "Item #1" }, + { __typename: "Item", id: 2, text: "Item #2 updated" }, + { __typename: "Item", id: 5, text: "Item #5" }, + ], + dataState: "complete", + complete: true, + }); +}); + +test("handles arrays with null", async () => { + const fragment: TypedDocumentNode = gql` + fragment ItemFragment on Item { + id + text + } + `; + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: ApolloLink.empty(), + }); + + const observable = client.watchFragment({ + fragment, + from: [null, null, { __typename: "Item", id: 5 }], + }); + + expect(observable.getCurrentResult()).toStrictEqualTyped({ + data: [null, null, null], + dataState: "partial", + complete: false, + missing: { + 2: "Dangling reference to missing Item:5 object", + }, + }); +}); + +test("works with data masking", async () => { + type ItemDetails = { + __typename: 
string; + text: string; + } & { " $fragmentName"?: "ItemDetailsFragment" }; + + type Item = { + __typename: string; + id: number; + } & { + " $fragmentRefs"?: { ItemDetailsFragment: ItemDetails }; + }; + + const detailsFragment: TypedDocumentNode = gql` + fragment ItemDetailsFragment on Item { + text + } + `; + + const fragment: TypedDocumentNode = gql` + fragment ItemFragment on Item { + id + ...ItemDetailsFragment + } + + ${detailsFragment} + `; + + const client = new ApolloClient({ + dataMasking: true, + cache: new InMemoryCache(), + link: ApolloLink.empty(), + }); + + for (let i = 1; i <= 5; i++) { + client.writeFragment({ + fragment, + fragmentName: "ItemFragment", + data: { __typename: "Item", id: i, text: `Item #${i}` }, + }); + } + + const parentObservable = client.watchFragment({ + fragment, + fragmentName: "ItemFragment", + from: [ + { __typename: "Item", id: 1 }, + { __typename: "Item", id: 2 }, + { __typename: "Item", id: 5 }, + ], + }); + const childObservable = client.watchFragment({ + fragment: detailsFragment, + from: [ + { __typename: "Item", id: 1 }, + { __typename: "Item", id: 2 }, + { __typename: "Item", id: 5 }, + ], + }); + + expect(parentObservable.getCurrentResult()).toStrictEqualTyped({ + data: [ + { __typename: "Item", id: 1 }, + { __typename: "Item", id: 2 }, + { __typename: "Item", id: 5 }, + ], + dataState: "complete", + complete: true, + }); + expect(childObservable.getCurrentResult()).toStrictEqualTyped({ + data: [ + { __typename: "Item", text: "Item #1" }, + { __typename: "Item", text: "Item #2" }, + { __typename: "Item", text: "Item #5" }, + ], + dataState: "complete", + complete: true, + }); + + client.writeFragment({ + fragment, + fragmentName: "ItemFragment", + data: { __typename: "Item", id: 2, text: "Item #2 updated" }, + }); + + expect(parentObservable.getCurrentResult()).toStrictEqualTyped({ + data: [ + { __typename: "Item", id: 1 }, + { __typename: "Item", id: 2 }, + { __typename: "Item", id: 5 }, + ], + dataState: "complete", + complete: true, + }); + expect(childObservable.getCurrentResult()).toStrictEqualTyped({ + data: [ + { __typename: "Item", text: "Item #1" }, + { __typename: "Item", text: "Item #2 updated" }, + { __typename: "Item", text: "Item #5" }, + ], + dataState: "complete", + complete: true, + }); +}); + +test("works with data masking @unmask migrate mode", async () => { + using consoleSpy = spyOnConsole("warn"); + type ItemDetails = { + __typename: string; + text: string; + } & { " $fragmentName"?: "ItemDetailsFragment" }; + + type Item = { + __typename: string; + id: number; + text: string; + } & { + " $fragmentRefs"?: { ItemDetailsFragment: ItemDetails }; + }; + + const detailsFragment: TypedDocumentNode = gql` + fragment ItemDetailsFragment on Item { + text + } + `; + + const fragment: TypedDocumentNode = gql` + fragment ItemFragment on Item { + id + ...ItemDetailsFragment @unmask(mode: "migrate") + } + + ${detailsFragment} + `; + + const client = new ApolloClient({ + dataMasking: true, + cache: new InMemoryCache(), + link: ApolloLink.empty(), + }); + + for (let i = 1; i <= 5; i++) { + client.writeFragment({ + fragment, + fragmentName: "ItemFragment", + data: { __typename: "Item", id: i, text: `Item #${i}` }, + }); + } + + const observable = client.watchFragment({ + fragment, + fragmentName: "ItemFragment", + from: [ + { __typename: "Item", id: 1 }, + { __typename: "Item", id: 2 }, + { __typename: "Item", id: 5 }, + ], + }); + + expect(observable.getCurrentResult()).toStrictEqualTyped({ + data: [ + { __typename: "Item", id: 1, 
text: "Item #1" }, + { __typename: "Item", id: 2, text: "Item #2" }, + { __typename: "Item", id: 5, text: "Item #5" }, + ], + dataState: "complete", + complete: true, + }); + + expect(console.warn).toHaveBeenCalledTimes(3); + for (let i = 0; i < 3; i++) { + expect(console.warn).toHaveBeenNthCalledWith( + i + 1, + expect.stringContaining("Accessing unmasked field on %s at path '%s'."), + "fragment 'ItemFragment'", + `[${i}].text` + ); + } + consoleSpy.warn.mockClear(); + + client.writeFragment({ + fragment, + fragmentName: "ItemFragment", + data: { __typename: "Item", id: 2, text: "Item #2 updated" }, + }); + + expect(observable.getCurrentResult()).toStrictEqualTyped({ + data: [ + { __typename: "Item", id: 1, text: "Item #1" }, + { __typename: "Item", id: 2, text: "Item #2 updated" }, + { __typename: "Item", id: 5, text: "Item #5" }, + ], + dataState: "complete", + complete: true, + }); + + expect(console.warn).toHaveBeenCalledTimes(3); + for (let i = 0; i < 3; i++) { + expect(console.warn).toHaveBeenNthCalledWith( + i + 1, + expect.stringContaining("Accessing unmasked field on %s at path '%s'."), + "fragment 'ItemFragment'", + `[${i}].text` + ); + } +}); diff --git a/src/core/__tests__/client.watchFragment/types.test.ts b/src/core/__tests__/client.watchFragment/types.test.ts new file mode 100644 index 00000000000..962563f9921 --- /dev/null +++ b/src/core/__tests__/client.watchFragment/types.test.ts @@ -0,0 +1,281 @@ +import { expectTypeOf } from "expect-type"; + +import type { + DataValue, + Reference, + StoreObject, + TypedDocumentNode, +} from "@apollo/client"; +import { ApolloClient, ApolloLink, InMemoryCache } from "@apollo/client"; +import type { MissingTree } from "@apollo/client/cache"; + +describe.skip("type tests", () => { + interface Item { + __typename: "Item"; + id: number; + text: string; + } + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: ApolloLink.empty(), + }); + + let fragment!: TypedDocumentNode>; + + test("from: null -> null", () => { + const observable = client.watchFragment({ fragment, from: null }); + const result = observable.getCurrentResult(); + + expectTypeOf(observable).toEqualTypeOf< + ApolloClient.ObservableFragment + >(); + expectTypeOf(result).toEqualTypeOf<{ + data: null; + dataState: "complete"; + complete: true; + missing?: never; + }>(); + }); + + test("from: StoreObject -> TData", () => { + const observable = client.watchFragment({ + fragment, + from: { __typename: "Item", id: 1 }, + }); + const result = observable.getCurrentResult(); + + expectTypeOf(observable).toEqualTypeOf< + ApolloClient.ObservableFragment + >(); + expectTypeOf(result).toEqualTypeOf< + | { + data: Item; + dataState: "complete"; + complete: true; + missing?: never; + } + | { + data: DataValue.Partial; + dataState: "partial"; + complete: false; + missing?: MissingTree; + } + >(); + }); + + test("from: string -> TData", () => { + const observable = client.watchFragment({ fragment, from: "Item:1" }); + const result = observable.getCurrentResult(); + + expectTypeOf(observable).toEqualTypeOf< + ApolloClient.ObservableFragment + >(); + expectTypeOf(result).toEqualTypeOf< + | { + data: Item; + dataState: "complete"; + complete: true; + missing?: never; + } + | { + data: DataValue.Partial; + dataState: "partial"; + complete: false; + missing?: MissingTree; + } + >(); + }); + + test("from: Reference -> TData", () => { + const observable = client.watchFragment({ + fragment, + from: { __ref: "Item:1" }, + }); + const result = observable.getCurrentResult(); + + 
expectTypeOf(observable).toEqualTypeOf< + ApolloClient.ObservableFragment + >(); + expectTypeOf(result).toEqualTypeOf< + | { + data: Item; + dataState: "complete"; + complete: true; + missing?: never; + } + | { + data: DataValue.Partial; + dataState: "partial"; + complete: false; + missing?: MissingTree; + } + >(); + }); + + test("from: StoreObject | null -> TData | null", () => { + const observable = client.watchFragment({ + fragment, + from: { __typename: "Item", id: 1 } as StoreObject | null, + }); + const result = observable.getCurrentResult(); + + expectTypeOf(observable).toEqualTypeOf< + ApolloClient.ObservableFragment + >(); + expectTypeOf(result).toEqualTypeOf< + | { + data: null; + dataState: "complete"; + complete: true; + missing?: never; + } + | { + data: Item; + dataState: "complete"; + complete: true; + missing?: never; + } + | { + data: DataValue.Partial; + dataState: "partial"; + complete: false; + missing?: MissingTree; + } + >(); + }); + + test("from: string | null -> TData | null", () => { + const observable = client.watchFragment({ + fragment, + from: "Item:1" as string | null, + }); + const result = observable.getCurrentResult(); + + expectTypeOf(observable).toEqualTypeOf< + ApolloClient.ObservableFragment + >(); + expectTypeOf(result).toEqualTypeOf< + | { + data: null; + dataState: "complete"; + complete: true; + missing?: never; + } + | { + data: Item; + dataState: "complete"; + complete: true; + missing?: never; + } + | { + data: DataValue.Partial; + dataState: "partial"; + complete: false; + missing?: MissingTree; + } + >(); + }); + + test("from: Reference | null -> TData | null", () => { + const observable = client.watchFragment({ + fragment, + from: { __ref: "Item:1" } as Reference | null, + }); + const result = observable.getCurrentResult(); + + expectTypeOf(observable).toEqualTypeOf< + ApolloClient.ObservableFragment + >(); + expectTypeOf(result).toEqualTypeOf< + | { + data: null; + dataState: "complete"; + complete: true; + missing?: never; + } + | { + data: Item; + dataState: "complete"; + complete: true; + missing?: never; + } + | { + data: DataValue.Partial; + dataState: "partial"; + complete: false; + missing?: MissingTree; + } + >(); + }); + + test("from: Array -> Array", () => { + const observable = client.watchFragment({ + fragment, + from: [null], + }); + const result = observable.getCurrentResult(); + + expectTypeOf(observable).toEqualTypeOf< + ApolloClient.ObservableFragment> + >(); + expectTypeOf(result).toEqualTypeOf<{ + data: Array; + dataState: "complete"; + complete: true; + missing?: never; + }>(); + }); + + test("from: Array -> Array", () => { + const observable = client.watchFragment({ + fragment, + from: [{ __typename: "Item", id: 1 }, "Item:1", { __ref: "Item:1" }], + }); + const result = observable.getCurrentResult(); + + expectTypeOf(observable).toEqualTypeOf< + ApolloClient.ObservableFragment> + >(); + expectTypeOf(result).toEqualTypeOf< + | { + data: Array; + dataState: "complete"; + complete: true; + missing?: never; + } + | { + data: Array | null>; + dataState: "partial"; + complete: false; + missing?: MissingTree; + } + >(); + }); + + test("from: Array -> Array", () => { + const observable = client.watchFragment({ + fragment, + from: [{ __typename: "Item", id: 1 }, null], + }); + const result = observable.getCurrentResult(); + + expectTypeOf(observable).toEqualTypeOf< + ApolloClient.ObservableFragment> + >(); + expectTypeOf(result).toEqualTypeOf< + | { + data: Array; + dataState: "complete"; + complete: true; + missing?: never; + } + 
| { + data: Array | null>; + dataState: "partial"; + complete: false; + missing?: MissingTree; + } + >(); + }); +}); diff --git a/src/react/hooks/__tests__/useFragment.test.tsx b/src/react/hooks/__tests__/useFragment.test.tsx index e1ba367c8fc..bb8d312f900 100644 --- a/src/react/hooks/__tests__/useFragment.test.tsx +++ b/src/react/hooks/__tests__/useFragment.test.tsx @@ -1439,7 +1439,7 @@ describe("useFragment", () => { }); }); - it("returns correct data when options change", async () => { + it("returns correct data when from changes", async () => { const client = new ApolloClient({ cache: new InMemoryCache(), link: ApolloLink.empty(), @@ -1498,6 +1498,72 @@ describe("useFragment", () => { await expect(takeSnapshot).not.toRerender(); }); + it("returns correct data when options change", async () => { + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: ApolloLink.empty(), + }); + type User = { __typename: "User"; id: number; name: string }; + const fragment: TypedDocumentNode = gql` + fragment UserFragment on User { + id + name(casing: $casing) + } + `; + + client.writeFragment({ + fragment, + data: { __typename: "User", id: 1, name: "ALICE" }, + variables: { casing: "upper" }, + }); + + client.writeFragment({ + fragment, + data: { __typename: "User", id: 1, name: "alice" }, + variables: { casing: "lower" }, + }); + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot, rerender } = await renderHookToSnapshotStream( + ({ casing }) => + useFragment({ + fragment, + from: { __typename: "User", id: 1 }, + variables: { casing }, + }), + { + initialProps: { casing: "upper" }, + wrapper: ({ children }) => ( + {children} + ), + } + ); + + { + const snapshot = await takeSnapshot(); + + expect(snapshot).toStrictEqualTyped({ + complete: true, + data: { __typename: "User", id: 1, name: "ALICE" }, + dataState: "complete", + }); + } + + await rerender({ casing: "lower" }); + + { + const snapshot = await takeSnapshot(); + + expect(snapshot).toStrictEqualTyped({ + complete: true, + data: { __typename: "User", id: 1, name: "alice" }, + dataState: "complete", + }); + } + + await expect(takeSnapshot).not.toRerender(); + }); + it("does not rerender when fields with @nonreactive change", async () => { type Post = { __typename: "Post"; @@ -1770,21 +1836,19 @@ describe("useFragment", () => { } ); - { - const { data, complete } = await takeSnapshot(); - - expect(data).toStrictEqualTyped({}); - expect(complete).toBe(false); - } + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: {}, + dataState: "partial", + complete: false, + }); await rerender({ from: { __typename: "User", id: "1" } }); - { - const { data, complete } = await takeSnapshot(); - - expect(data).toStrictEqualTyped({ __typename: "User", id: "1", age: 30 }); - expect(complete).toBe(true); - } + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: { __typename: "User", id: "1", age: 30 }, + dataState: "complete", + complete: true, + }); }); describe("tests with incomplete data", () => { @@ -2371,20 +2435,6 @@ describe("has the same timing as `useQuery`", () => { id: cache.identify(item2), }); - { - // unintended extra render - const { withinDOM } = await renderStream.takeRender(); - const parent = withinDOM().getByTestId("parent"); - const children = withinDOM().getByTestId("children"); - - expect(within(parent).queryAllByText(/Item #1/).length).toBe(1); - expect(within(children).queryAllByText(/Item #1/).length).toBe(1); - - // problem: useFragment renders before useQuery catches up - 
expect(within(parent).queryAllByText(/Item #2/).length).toBe(1); - expect(within(children).queryAllByText(/Item #2/).length).toBe(0); - } - { const { withinDOM } = await renderStream.takeRender(); const parent = withinDOM().getByTestId("parent"); @@ -2570,7 +2620,13 @@ describe.skip("Type Tests", () => { expectTypeOf< useFragment.Options >().branded.toEqualTypeOf<{ - from: string | StoreObject | Reference | FragmentType | null; + from: + | string + | StoreObject + | Reference + | FragmentType + | null + | Array | null>; fragment: DocumentNode | TypedDocumentNode; fragmentName?: string; optimistic?: boolean; diff --git a/src/react/hooks/__tests__/useFragment/arrays.test.tsx b/src/react/hooks/__tests__/useFragment/arrays.test.tsx new file mode 100644 index 00000000000..ae6c6cd5805 --- /dev/null +++ b/src/react/hooks/__tests__/useFragment/arrays.test.tsx @@ -0,0 +1,585 @@ +import { + disableActEnvironment, + renderHookToSnapshotStream, +} from "@testing-library/react-render-stream"; + +import type { TypedDocumentNode } from "@apollo/client"; +import { ApolloClient, ApolloLink, gql, InMemoryCache } from "@apollo/client"; +import { useFragment } from "@apollo/client/react"; +import { createClientWrapper } from "@apollo/client/testing/internal"; + +test("can use array for `from` to get array of items", async () => { + type Item = { + __typename: string; + id: number; + text?: string; + }; + + const fragment: TypedDocumentNode = gql` + fragment ItemFragment on Item { + id + text + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: ApolloLink.empty(), + }); + + for (let i = 1; i <= 5; i++) { + client.writeFragment({ + fragment, + data: { __typename: "Item", id: i, text: `Item #${i}` }, + }); + } + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot } = await renderHookToSnapshotStream( + () => + useFragment({ + fragment, + from: [ + { __typename: "Item", id: 1 }, + { __typename: "Item", id: 2 }, + { __typename: "Item", id: 5 }, + ], + }), + { wrapper: createClientWrapper(client) } + ); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: [ + { __typename: "Item", id: 1, text: "Item #1" }, + { __typename: "Item", id: 2, text: "Item #2" }, + { __typename: "Item", id: 5, text: "Item #5" }, + ], + dataState: "complete", + complete: true, + }); + + await expect(takeSnapshot).not.toRerender(); +}); + +test("returns result as complete for null array item `from` value", async () => { + type Item = { + __typename: string; + id: number; + text?: string; + }; + + const fragment: TypedDocumentNode = gql` + fragment ItemFragment on Item { + id + text + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: ApolloLink.empty(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot } = await renderHookToSnapshotStream( + () => + useFragment({ + fragment, + from: [null, null, null], + }), + { wrapper: createClientWrapper(client) } + ); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: [null, null, null], + dataState: "complete", + complete: true, + }); + + await expect(takeSnapshot).not.toRerender(); +}); + +test("returns as partial if some `from` items are incomplete mixed with null", async () => { + type Item = { + __typename: string; + id: number; + text?: string; + }; + + const fragment: TypedDocumentNode = gql` + fragment ItemFragment on Item { + id + text + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: ApolloLink.empty(), + }); + + 
using _disabledAct = disableActEnvironment(); + const { takeSnapshot } = await renderHookToSnapshotStream( + () => + useFragment({ + fragment, + from: [null, null, { __typename: "Item", id: 5 }], + }), + { wrapper: createClientWrapper(client) } + ); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: [null, null, null], + dataState: "partial", + complete: false, + missing: { + 2: "Dangling reference to missing Item:5 object", + }, + }); + + await expect(takeSnapshot).not.toRerender(); +}); + +test("allows mix of array identifiers", async () => { + type Item = { + __typename: string; + id: number; + text?: string; + }; + + const fragment: TypedDocumentNode = gql` + fragment ItemFragment on Item { + id + text + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: ApolloLink.empty(), + }); + + for (let i = 1; i <= 5; i++) { + client.writeFragment({ + fragment, + data: { __typename: "Item", id: i, text: `Item #${i}` }, + }); + } + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot } = await renderHookToSnapshotStream( + () => + useFragment({ + fragment, + from: [{ __typename: "Item", id: 1 }, "Item:2", null], + }), + { wrapper: createClientWrapper(client) } + ); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: [ + { __typename: "Item", id: 1, text: "Item #1" }, + { __typename: "Item", id: 2, text: "Item #2" }, + null, + ], + dataState: "complete", + complete: true, + }); + + await expect(takeSnapshot).not.toRerender(); +}); + +test("returns empty array with empty from", async () => { + type Item = { + __typename: string; + id: number; + text?: string; + }; + + const fragment: TypedDocumentNode = gql` + fragment ItemFragment on Item { + id + text + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: ApolloLink.empty(), + }); + + for (let i = 1; i <= 5; i++) { + client.writeFragment({ + fragment, + data: { __typename: "Item", id: i, text: `Item #${i}` }, + }); + } + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot } = await renderHookToSnapshotStream( + () => useFragment({ fragment, from: [] }), + { wrapper: createClientWrapper(client) } + ); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: [], + dataState: "complete", + complete: true, + }); + await expect(takeSnapshot).not.toRerender(); +}); + +test("returns incomplete results when cache is empty", async () => { + type Item = { + __typename: string; + id: number; + text?: string; + }; + + const fragment: TypedDocumentNode = gql` + fragment ItemFragment on Item { + id + text + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: ApolloLink.empty(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot } = await renderHookToSnapshotStream( + () => + useFragment({ + fragment, + from: [ + { __typename: "Item", id: 1 }, + { __typename: "Item", id: 2 }, + { __typename: "Item", id: 5 }, + ], + }), + { wrapper: createClientWrapper(client) } + ); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: [null, null, null], + dataState: "partial", + complete: false, + missing: { + 0: "Dangling reference to missing Item:1 object", + 1: "Dangling reference to missing Item:2 object", + 2: "Dangling reference to missing Item:5 object", + }, + }); + + await expect(takeSnapshot).not.toRerender(); +}); + +test("can use static arrays with useFragment with partially fulfilled items", async () => { + type Item = { + __typename: 
string; + id: number; + text?: string; + }; + + const fragment: TypedDocumentNode = gql` + fragment ItemFragment on Item { + id + text + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: ApolloLink.empty(), + }); + + for (let i = 1; i <= 2; i++) { + client.writeFragment({ + fragment, + data: { __typename: "Item", id: i, text: `Item #${i}` }, + }); + } + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot } = await renderHookToSnapshotStream( + () => + useFragment({ + fragment, + from: [ + { __typename: "Item", id: 1 }, + { __typename: "Item", id: 2 }, + { __typename: "Item", id: 5 }, + ], + }), + { wrapper: createClientWrapper(client) } + ); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: [ + { __typename: "Item", id: 1, text: "Item #1" }, + { __typename: "Item", id: 2, text: "Item #2" }, + null, + ], + dataState: "partial", + complete: false, + missing: { + 2: "Dangling reference to missing Item:5 object", + }, + }); + + await expect(takeSnapshot).not.toRerender(); +}); + +test("handles changing array size", async () => { + type Item = { + __typename: string; + id: number; + text?: string; + }; + + const fragment: TypedDocumentNode = gql` + fragment ItemFragment on Item { + id + text + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: ApolloLink.empty(), + }); + + for (let i = 1; i <= 5; i++) { + client.writeFragment({ + fragment, + data: { __typename: "Item", id: i, text: `Item #${i}` }, + }); + } + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot, rerender } = await renderHookToSnapshotStream( + ({ from }) => useFragment({ fragment, from }), + { + initialProps: { + from: [ + { __typename: "Item", id: 1 }, + { __typename: "Item", id: 2 }, + ], + }, + wrapper: createClientWrapper(client), + } + ); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: [ + { __typename: "Item", id: 1, text: "Item #1" }, + { __typename: "Item", id: 2, text: "Item #2" }, + ], + dataState: "complete", + complete: true, + }); + + await rerender({ + from: [ + { __typename: "Item", id: 1 }, + { __typename: "Item", id: 2 }, + { __typename: "Item", id: 5 }, + ], + }); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: [ + { __typename: "Item", id: 1, text: "Item #1" }, + { __typename: "Item", id: 2, text: "Item #2" }, + { __typename: "Item", id: 5, text: "Item #5" }, + ], + dataState: "complete", + complete: true, + }); + + await rerender({ + from: [ + { __typename: "Item", id: 1 }, + { __typename: "Item", id: 5 }, + ], + }); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: [ + { __typename: "Item", id: 1, text: "Item #1" }, + { __typename: "Item", id: 5, text: "Item #5" }, + ], + dataState: "complete", + complete: true, + }); + + await rerender({ + from: [], + }); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: [], + dataState: "complete", + complete: true, + }); + + await rerender({ + from: [{ __typename: "Item", id: 6 }], + }); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: [null], + dataState: "partial", + complete: false, + missing: { + 0: "Dangling reference to missing Item:6 object", + }, + }); + + await expect(takeSnapshot).not.toRerender(); +}); + +test("updates items in the array with cache writes", async () => { + type Item = { + __typename: string; + id: number; + text?: string; + }; + + const fragment: TypedDocumentNode = gql` + fragment ItemFragment on Item { + id + 
text + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: ApolloLink.empty(), + }); + const { cache } = client; + + for (let i = 1; i <= 2; i++) { + client.writeFragment({ + fragment, + data: { __typename: "Item", id: i, text: `Item #${i}` }, + }); + } + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot } = await renderHookToSnapshotStream( + () => + useFragment({ + fragment, + from: [ + { __typename: "Item", id: 1 }, + { __typename: "Item", id: 2 }, + { __typename: "Item", id: 5 }, + ], + }), + { wrapper: createClientWrapper(client) } + ); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: [ + { __typename: "Item", id: 1, text: "Item #1" }, + { __typename: "Item", id: 2, text: "Item #2" }, + null, + ], + dataState: "partial", + complete: false, + missing: { + 2: "Dangling reference to missing Item:5 object", + }, + }); + + client.writeFragment({ + fragment, + data: { + __typename: "Item", + id: 2, + text: "Item #2 updated", + }, + }); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: [ + { __typename: "Item", id: 1, text: "Item #1" }, + { __typename: "Item", id: 2, text: "Item #2 updated" }, + null, + ], + dataState: "partial", + complete: false, + missing: { + 2: "Dangling reference to missing Item:5 object", + }, + }); + + client.cache.batch({ + update: (cache) => { + cache.writeFragment({ + fragment, + data: { + __typename: "Item", + id: 1, + text: "Item #1 from batch", + }, + }); + + cache.writeFragment({ + fragment, + data: { + __typename: "Item", + id: 5, + text: "Item #5 from batch", + }, + }); + }, + }); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: [ + { __typename: "Item", id: 1, text: "Item #1 from batch" }, + { __typename: "Item", id: 2, text: "Item #2 updated" }, + { __typename: "Item", id: 5, text: "Item #5 from batch" }, + ], + dataState: "complete", + complete: true, + }); + + cache.modify({ + id: cache.identify({ __typename: "Item", id: 1 }), + fields: { + text: (_, { DELETE }) => DELETE, + }, + }); + + await expect(takeSnapshot()).resolves.toStrictEqualTyped({ + data: [ + { __typename: "Item", id: 1 }, + { __typename: "Item", id: 2, text: "Item #2 updated" }, + { __typename: "Item", id: 5, text: "Item #5 from batch" }, + ], + dataState: "partial", + complete: false, + missing: { + 0: { + text: "Can't find field 'text' on Item:1 object", + }, + }, + }); + + // should not cause rerender since its an item not watched + client.writeFragment({ + fragment, + data: { + __typename: "Item", + id: 6, + text: "Item #6 ignored", + }, + }); + + await expect(takeSnapshot).not.toRerender(); +}); diff --git a/src/react/hooks/__tests__/useSuspenseFragment.test.tsx b/src/react/hooks/__tests__/useSuspenseFragment.test.tsx index f910da745a4..b8f0710c1d6 100644 --- a/src/react/hooks/__tests__/useSuspenseFragment.test.tsx +++ b/src/react/hooks/__tests__/useSuspenseFragment.test.tsx @@ -24,11 +24,7 @@ import { } from "@apollo/client"; import { ApolloProvider, useSuspenseFragment } from "@apollo/client/react"; import { MockSubscriptionLink } from "@apollo/client/testing"; -import { - renderAsync, - spyOnConsole, - wait, -} from "@apollo/client/testing/internal"; +import { renderAsync, spyOnConsole } from "@apollo/client/testing/internal"; import { MockedProvider } from "@apollo/client/testing/react"; import { removeDirectivesFromDocument } from "@apollo/client/utilities/internal"; import { InvariantError } from "@apollo/client/utilities/invariant"; @@ -1553,14 +1549,13 @@ 
test("tears down the subscription on unmount", async () => { expect(data).toEqual({ __typename: "Item", id: 1, text: "Item #1" }); } - expect(cache["watches"].size).toBe(1); + expect(cache).toHaveNumWatches(1); unmount(); - // We need to wait a tick since the cleanup is run in a setTimeout to - // prevent strict mode bugs. - await wait(0); - expect(cache["watches"].size).toBe(0); + // Cleanup happens async so we just need to ensure it happens sometime after + // mount + await waitFor(() => expect(cache).toHaveNumWatches(0)); }); test("tears down all watches when rendering multiple records", async () => { @@ -1617,11 +1612,10 @@ test("tears down all watches when rendering multiple records", async () => { } unmount(); - // We need to wait a tick since the cleanup is run in a setTimeout to - // prevent strict mode bugs. - await wait(0); - expect(cache["watches"].size).toBe(0); + // Cleanup happens async so we just need to ensure it happens sometime after + // mount + await waitFor(() => expect(cache).toHaveNumWatches(0)); }); test("tears down watches after default autoDisposeTimeoutMs if component never renders again after suspending", async () => { @@ -1686,11 +1680,13 @@ test("tears down watches after default autoDisposeTimeoutMs if component never r // clear the microtask queue await act(() => Promise.resolve()); - expect(cache["watches"].size).toBe(1); + expect(cache).toHaveNumWatches(1); jest.advanceTimersByTime(30_000); + // Run unsubscribe timeouts from cache watches + jest.runOnlyPendingTimers(); - expect(cache["watches"].size).toBe(0); + expect(cache).toHaveNumWatches(0); jest.useRealTimers(); }); @@ -1767,11 +1763,13 @@ test("tears down watches after configured autoDisposeTimeoutMs if component neve // clear the microtask queue await act(() => Promise.resolve()); - expect(cache["watches"].size).toBe(1); + expect(cache).toHaveNumWatches(1); jest.advanceTimersByTime(5000); + // Run unsubscribe timeouts from cache watches + jest.runOnlyPendingTimers(); - expect(cache["watches"].size).toBe(0); + expect(cache).toHaveNumWatches(0); jest.useRealTimers(); }); @@ -1833,7 +1831,7 @@ test("cancels autoDisposeTimeoutMs if the component renders before timer finishe jest.advanceTimersByTime(30_000); - expect(cache["watches"].size).toBe(1); + expect(cache).toHaveNumWatches(1); jest.useRealTimers(); }); @@ -1992,6 +1990,75 @@ describe.skip("type tests", () => { } }); + test("returns null[] when `from` is null[]", () => { + type Data = { foo: string }; + type Vars = Record; + const fragment: TypedDocumentNode = gql``; + + { + const { data } = useSuspenseFragment({ fragment, from: [null] }); + + expectTypeOf(data).toEqualTypeOf>(); + } + + { + const { data } = useSuspenseFragment({ + fragment: gql``, + from: [null], + }); + + expectTypeOf(data).toEqualTypeOf>(); + } + }); + + test("returns Array when `from` includes null with non-null", () => { + type Data = { foo: string }; + type Vars = Record; + const fragment: TypedDocumentNode = gql``; + + { + const { data } = useSuspenseFragment({ + fragment, + from: [null, { __typename: "Item", id: 1 }], + }); + + expectTypeOf(data).toEqualTypeOf>(); + } + + { + const { data } = useSuspenseFragment({ + fragment: gql``, + from: [null, { __typename: "Item", id: 1 }], + }); + + expectTypeOf(data).toEqualTypeOf>(); + } + }); + + test("returns TData[] when `from` includes array of non-null", () => { + type Data = { foo: string }; + type Vars = Record; + const fragment: TypedDocumentNode = gql``; + + { + const { data } = useSuspenseFragment({ + fragment, + from: [{ 
__typename: "Item", id: 1 }], + }); + + expectTypeOf(data).toEqualTypeOf>(); + } + + { + const { data } = useSuspenseFragment({ + fragment: gql``, + from: [{ __typename: "Item", id: 1 }], + }); + + expectTypeOf(data).toEqualTypeOf>(); + } + }); + test("variables are optional and can be anything with an untyped DocumentNode", () => { const fragment = gql``; diff --git a/src/react/hooks/__tests__/useSuspenseFragment/arrays.test.tsx b/src/react/hooks/__tests__/useSuspenseFragment/arrays.test.tsx new file mode 100644 index 00000000000..ae694d610a3 --- /dev/null +++ b/src/react/hooks/__tests__/useSuspenseFragment/arrays.test.tsx @@ -0,0 +1,1081 @@ +import type { RenderOptions } from "@testing-library/react"; +import { screen, waitFor } from "@testing-library/react"; +import { + createRenderStream, + disableActEnvironment, + useTrackRenders, +} from "@testing-library/react-render-stream"; +import { userEvent } from "@testing-library/user-event"; +import React, { Suspense } from "react"; + +import type { StoreObject, TypedDocumentNode } from "@apollo/client"; +import { ApolloClient, ApolloLink, gql, InMemoryCache } from "@apollo/client"; +import { useSuspenseFragment } from "@apollo/client/react"; +import { createClientWrapper } from "@apollo/client/testing/internal"; + +async function renderUseSuspenseFragment( + renderHook: (props: Props) => useSuspenseFragment.Result, + options: Pick & { initialProps?: Props } +) { + function UseSuspenseFragment({ props }: { props: Props | undefined }) { + useTrackRenders({ name: "useSuspenseFragment" }); + replaceSnapshot(renderHook(props as any)); + + return null; + } + + function SuspenseFallback() { + useTrackRenders({ name: "SuspenseFallback" }); + + return null; + } + + function App({ props }: { props: Props | undefined }) { + return ( + }> + + + ); + } + + const { render, takeRender, replaceSnapshot } = createRenderStream< + useSuspenseFragment.Result + >({ skipNonTrackingRenders: true }); + + const utils = await render(, options); + + function rerender(props: Props) { + return utils.rerender(); + } + + return { takeRender, rerender }; +} + +test("renders array and does not suspend array for `from` array when written to cache", async () => { + type Item = { + __typename: string; + id: number; + text?: string; + }; + + const fragment: TypedDocumentNode = gql` + fragment ItemFragment on Item { + id + text + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: ApolloLink.empty(), + }); + + for (let i = 1; i <= 5; i++) { + client.writeFragment({ + fragment, + data: { __typename: "Item", id: i, text: `Item #${i}` }, + }); + } + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderUseSuspenseFragment( + () => + useSuspenseFragment({ + fragment, + from: [ + { __typename: "Item", id: 1 }, + { __typename: "Item", id: 2 }, + { __typename: "Item", id: 5 }, + ], + }), + { wrapper: createClientWrapper(client) } + ); + + const { renderedComponents, snapshot } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseFragment"]); + expect(snapshot).toStrictEqualTyped({ + data: [ + { __typename: "Item", id: 1, text: "Item #1" }, + { __typename: "Item", id: 2, text: "Item #2" }, + { __typename: "Item", id: 5, text: "Item #5" }, + ], + }); + + await expect(takeRender).not.toRerender(); +}); + +test("updates items in the array with cache writes", async () => { + type Item = { + __typename: string; + id: number; + text?: string; + }; + + const fragment: TypedDocumentNode = gql` + fragment 
ItemFragment on Item { + id + text + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: ApolloLink.empty(), + }); + + for (let i = 1; i <= 5; i++) { + client.writeFragment({ + fragment, + data: { __typename: "Item", id: i, text: `Item #${i}` }, + }); + } + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderUseSuspenseFragment( + () => + useSuspenseFragment({ + fragment, + from: [ + { __typename: "Item", id: 1 }, + { __typename: "Item", id: 2 }, + { __typename: "Item", id: 5 }, + ], + }), + { wrapper: createClientWrapper(client) } + ); + + { + const { renderedComponents, snapshot } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseFragment"]); + expect(snapshot).toStrictEqualTyped({ + data: [ + { __typename: "Item", id: 1, text: "Item #1" }, + { __typename: "Item", id: 2, text: "Item #2" }, + { __typename: "Item", id: 5, text: "Item #5" }, + ], + }); + } + + client.writeFragment({ + fragment, + data: { + __typename: "Item", + id: 2, + text: "Item #2 updated", + }, + }); + + { + const { renderedComponents, snapshot } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseFragment"]); + expect(snapshot).toStrictEqualTyped({ + data: [ + { __typename: "Item", id: 1, text: "Item #1" }, + { __typename: "Item", id: 2, text: "Item #2 updated" }, + { __typename: "Item", id: 5, text: "Item #5" }, + ], + }); + } + + client.cache.batch({ + update: (cache) => { + cache.writeFragment({ + fragment, + data: { + __typename: "Item", + id: 1, + text: "Item #1 from batch", + }, + }); + + cache.writeFragment({ + fragment, + data: { + __typename: "Item", + id: 5, + text: "Item #5 from batch", + }, + }); + }, + }); + + { + const { renderedComponents, snapshot } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseFragment"]); + expect(snapshot).toStrictEqualTyped({ + data: [ + { __typename: "Item", id: 1, text: "Item #1 from batch" }, + { __typename: "Item", id: 2, text: "Item #2 updated" }, + { __typename: "Item", id: 5, text: "Item #5 from batch" }, + ], + }); + } + + // should not cause rerender since its an item not watched + client.writeFragment({ + fragment, + data: { + __typename: "Item", + id: 6, + text: "Item #6 ignored", + }, + }); + + await expect(takeRender).not.toRerender(); +}); + +test("does not suspend and returns null array for null `from` array", async () => { + type Item = { + __typename: string; + id: number; + text?: string; + }; + + const fragment: TypedDocumentNode = gql` + fragment ItemFragment on Item { + id + text + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: ApolloLink.empty(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderUseSuspenseFragment( + () => useSuspenseFragment({ fragment, from: [null, null, null] }), + { wrapper: createClientWrapper(client) } + ); + + const { renderedComponents, snapshot } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseFragment"]); + expect(snapshot).toStrictEqualTyped({ + data: [null, null, null], + }); + + await expect(takeRender).not.toRerender(); +}); + +test("handles mixed array of identifiers in `from`", async () => { + type Item = { + __typename: string; + id: number; + text?: string; + }; + + const fragment: TypedDocumentNode = gql` + fragment ItemFragment on Item { + id + text + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: ApolloLink.empty(), + }); + 
+ for (let i = 1; i <= 5; i++) { + client.writeFragment({ + fragment, + data: { __typename: "Item", id: i, text: `Item #${i}` }, + }); + } + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderUseSuspenseFragment( + () => + useSuspenseFragment({ + fragment, + from: [{ __typename: "Item", id: 1 }, "Item:2", null], + }), + { wrapper: createClientWrapper(client) } + ); + + const { renderedComponents, snapshot } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseFragment"]); + expect(snapshot).toStrictEqualTyped({ + data: [ + { __typename: "Item", id: 1, text: "Item #1" }, + { __typename: "Item", id: 2, text: "Item #2" }, + null, + ], + }); + + await expect(takeRender).not.toRerender(); +}); + +test("does not suspend and returns empty array for empty `from` array", async () => { + type Item = { + __typename: string; + id: number; + text?: string; + }; + + const fragment: TypedDocumentNode = gql` + fragment ItemFragment on Item { + id + text + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: ApolloLink.empty(), + }); + + for (let i = 1; i <= 5; i++) { + client.writeFragment({ + fragment, + data: { __typename: "Item", id: i, text: `Item #${i}` }, + }); + } + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderUseSuspenseFragment( + () => useSuspenseFragment({ fragment, from: [] }), + { wrapper: createClientWrapper(client) } + ); + + const { renderedComponents, snapshot } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseFragment"]); + expect(snapshot).toStrictEqualTyped({ + data: [], + }); + + await expect(takeRender).not.toRerender(); +}); + +test("suspends until all items are complete", async () => { + type Item = { + __typename: string; + id: number; + text?: string; + }; + + const fragment: TypedDocumentNode = gql` + fragment ItemFragment on Item { + id + text + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: ApolloLink.empty(), + }); + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderUseSuspenseFragment( + () => + useSuspenseFragment({ + fragment, + from: [ + { __typename: "Item", id: 1 }, + { __typename: "Item", id: 2 }, + { __typename: "Item", id: 5 }, + ], + }), + { wrapper: createClientWrapper(client) } + ); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + client.writeFragment({ + fragment, + data: { __typename: "Item", id: 1, text: "Item #1" }, + }); + + await expect(takeRender).not.toRerender({ timeout: 20 }); + + client.writeFragment({ + fragment, + data: { __typename: "Item", id: 2, text: "Item #2" }, + }); + + await expect(takeRender).not.toRerender({ timeout: 20 }); + + client.writeFragment({ + fragment, + data: { __typename: "Item", id: 5, text: "Item #5" }, + }); + + { + const { renderedComponents, snapshot } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseFragment"]); + expect(snapshot).toStrictEqualTyped({ + data: [ + { __typename: "Item", id: 1, text: "Item #1" }, + { __typename: "Item", id: 2, text: "Item #2" }, + { __typename: "Item", id: 5, text: "Item #5" }, + ], + }); + } + + await expect(takeRender).not.toRerender(); +}); + +test("suspends until all items are complete with partially complete results on initial render", async () => { + type Item = { + __typename: string; + id: number; + text?: string; + }; + + const fragment: 
TypedDocumentNode = gql` + fragment ItemFragment on Item { + id + text + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: ApolloLink.empty(), + }); + + for (let i = 1; i <= 2; i++) { + client.writeFragment({ + fragment, + data: { __typename: "Item", id: i, text: `Item #${i}` }, + }); + } + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderUseSuspenseFragment( + () => + useSuspenseFragment({ + fragment, + from: [ + { __typename: "Item", id: 1 }, + { __typename: "Item", id: 2 }, + { __typename: "Item", id: 5 }, + ], + }), + { wrapper: createClientWrapper(client) } + ); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + client.writeFragment({ + fragment, + data: { __typename: "Item", id: 5, text: "Item #5" }, + }); + + { + const { renderedComponents, snapshot } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseFragment"]); + expect(snapshot).toStrictEqualTyped({ + data: [ + { __typename: "Item", id: 1, text: "Item #1" }, + { __typename: "Item", id: 2, text: "Item #2" }, + { __typename: "Item", id: 5, text: "Item #5" }, + ], + }); + } + + await expect(takeRender).not.toRerender(); +}); + +test("suspends when an item changes from complete to partial", async () => { + type Item = { + __typename: string; + id: number; + text?: string; + }; + + const fragment: TypedDocumentNode = gql` + fragment ItemFragment on Item { + id + text + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: ApolloLink.empty(), + }); + const { cache } = client; + + for (let i = 1; i <= 5; i++) { + client.writeFragment({ + fragment, + data: { __typename: "Item", id: i, text: `Item #${i}` }, + }); + } + + using _disabledAct = disableActEnvironment(); + const { takeRender } = await renderUseSuspenseFragment( + () => + useSuspenseFragment({ + fragment, + from: [ + { __typename: "Item", id: 1 }, + { __typename: "Item", id: 2 }, + { __typename: "Item", id: 5 }, + ], + }), + { wrapper: createClientWrapper(client) } + ); + + { + const { renderedComponents, snapshot } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseFragment"]); + expect(snapshot).toStrictEqualTyped({ + data: [ + { __typename: "Item", id: 1, text: "Item #1" }, + { __typename: "Item", id: 2, text: "Item #2" }, + { __typename: "Item", id: 5, text: "Item #5" }, + ], + }); + } + + cache.modify({ + id: cache.identify({ __typename: "Item", id: 1 }), + fields: { + text: (_, { DELETE }) => DELETE, + }, + }); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + client.writeFragment({ + fragment, + data: { __typename: "Item", id: 1, text: "Item #1 is back" }, + }); + + { + const { renderedComponents, snapshot } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseFragment"]); + expect(snapshot).toStrictEqualTyped({ + data: [ + { __typename: "Item", id: 1, text: "Item #1 is back" }, + { __typename: "Item", id: 2, text: "Item #2" }, + { __typename: "Item", id: 5, text: "Item #5" }, + ], + }); + } + + await expect(takeRender).not.toRerender(); +}); + +test("handles changing array size", async () => { + type Item = { + __typename: string; + id: number; + text?: string; + }; + + const fragment: TypedDocumentNode = gql` + fragment ItemFragment on Item { + id + text + } + `; + + const client = new ApolloClient({ + cache: new 
InMemoryCache(), + link: ApolloLink.empty(), + }); + + for (let i = 1; i <= 5; i++) { + client.writeFragment({ + fragment, + data: { __typename: "Item", id: i, text: `Item #${i}` }, + }); + } + + using _disabledAct = disableActEnvironment(); + const { takeRender, rerender } = await renderUseSuspenseFragment( + ({ from }) => useSuspenseFragment({ fragment, from }), + { + initialProps: { + from: [ + { __typename: "Item", id: 1 }, + { __typename: "Item", id: 2 }, + ], + }, + wrapper: createClientWrapper(client), + } + ); + + { + const { renderedComponents, snapshot } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseFragment"]); + expect(snapshot).toStrictEqualTyped({ + data: [ + { __typename: "Item", id: 1, text: "Item #1" }, + { __typename: "Item", id: 2, text: "Item #2" }, + ], + }); + } + + await rerender({ + from: [ + { __typename: "Item", id: 1 }, + { __typename: "Item", id: 2 }, + { __typename: "Item", id: 5 }, + ], + }); + + { + const { renderedComponents, snapshot } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseFragment"]); + expect(snapshot).toStrictEqualTyped({ + data: [ + { __typename: "Item", id: 1, text: "Item #1" }, + { __typename: "Item", id: 2, text: "Item #2" }, + { __typename: "Item", id: 5, text: "Item #5" }, + ], + }); + } + + await rerender({ + from: [ + { __typename: "Item", id: 1 }, + { __typename: "Item", id: 5 }, + ], + }); + + { + const { renderedComponents, snapshot } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseFragment"]); + expect(snapshot).toStrictEqualTyped({ + data: [ + { __typename: "Item", id: 1, text: "Item #1" }, + { __typename: "Item", id: 5, text: "Item #5" }, + ], + }); + } + + await rerender({ + from: [], + }); + + { + const { renderedComponents, snapshot } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseFragment"]); + expect(snapshot).toStrictEqualTyped({ + data: [], + }); + } + + await rerender({ + from: [{ __typename: "Item", id: 6 }], + }); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["SuspenseFallback"]); + } + + client.writeFragment({ + fragment, + data: { __typename: "Item", id: 6, text: "Item #6" }, + }); + + { + const { renderedComponents, snapshot } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseFragment"]); + expect(snapshot).toStrictEqualTyped({ + data: [{ __typename: "Item", id: 6, text: "Item #6" }], + }); + } + + await expect(takeRender).not.toRerender(); +}); + +test("rendering same items in multiple useSuspenseFragment hooks allows for rerendering a different array in the other", async () => { + type Item = { + __typename: string; + id: number; + text?: string; + }; + + const fragment: TypedDocumentNode = gql` + fragment ItemFragment on Item { + id + text + } + `; + + const cache = new InMemoryCache(); + const client = new ApolloClient({ + cache, + link: ApolloLink.empty(), + }); + + function UseSuspenseFragment({ + id, + items, + }: { + id: number; + items: StoreObject[]; + }) { + useTrackRenders({ name: `useSuspenseFragment ${id}` }); + mergeSnapshot({ + [`items${id}`]: useSuspenseFragment({ fragment, from: items }), + }); + + return null; + } + + function SuspenseFallback({ id }: { id: number }) { + // Reset snapshot so it doesn't seem like the useSuspenseFragment hook + // rendered + mergeSnapshot({ [`items${id}`]: undefined }); + useTrackRenders({ name: `SuspenseFallback ${id}` }); + + return null; + } + + 
function App({ + items1, + items2, + }: { + items1: StoreObject[]; + items2: StoreObject[]; + }) { + return ( + <> + }> + + + }> + + + + ); + } + + using _disabledAct = disableActEnvironment(); + const { render, takeRender, mergeSnapshot } = createRenderStream<{ + items1: useSuspenseFragment.Result | undefined; + items2: useSuspenseFragment.Result | undefined; + }>({ + skipNonTrackingRenders: true, + initialSnapshot: { items1: undefined, items2: undefined }, + }); + + const initialItems = [ + { __typename: "Item", id: 1 }, + { __typename: "Item", id: 2 }, + ]; + + const { rerender } = await render( + , + { wrapper: createClientWrapper(client) } + ); + + { + const { renderedComponents, snapshot } = await takeRender(); + + expect(renderedComponents).toStrictEqual([ + "SuspenseFallback 2", + "SuspenseFallback 1", + ]); + expect(snapshot).toStrictEqualTyped({ + items1: undefined, + items2: undefined, + }); + } + + client.writeFragment({ + fragment, + data: { __typename: "Item", id: 1, text: "Item #1" }, + }); + client.writeFragment({ + fragment, + data: { __typename: "Item", id: 2, text: "Item #2" }, + }); + + { + const { renderedComponents, snapshot } = await takeRender(); + + expect(renderedComponents).toStrictEqual([ + "useSuspenseFragment 2", + "useSuspenseFragment 1", + ]); + expect(snapshot).toStrictEqual({ + items1: { + data: [ + { __typename: "Item", id: 1, text: "Item #1" }, + { __typename: "Item", id: 2, text: "Item #2" }, + ], + }, + items2: { + data: [ + { __typename: "Item", id: 1, text: "Item #1" }, + { __typename: "Item", id: 2, text: "Item #2" }, + ], + }, + }); + } + await waitFor(() => expect(cache).toHaveNumWatches(2)); + + await rerender( + + ); + + { + const { renderedComponents, snapshot } = await takeRender(); + + expect(renderedComponents).toStrictEqual([ + "SuspenseFallback 2", + "useSuspenseFragment 1", + ]); + expect(snapshot).toStrictEqual({ + items1: { + data: [ + { __typename: "Item", id: 1, text: "Item #1" }, + { __typename: "Item", id: 2, text: "Item #2" }, + ], + }, + items2: undefined, + }); + } + await waitFor(() => expect(cache).toHaveNumWatches(3)); + + client.writeFragment({ + fragment, + data: { __typename: "Item", id: 5, text: "Item #5" }, + }); + + { + const { renderedComponents, snapshot } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseFragment 2"]); + expect(snapshot).toStrictEqual({ + items1: { + data: [ + { __typename: "Item", id: 1, text: "Item #1" }, + { __typename: "Item", id: 2, text: "Item #2" }, + ], + }, + items2: { + data: [ + { __typename: "Item", id: 1, text: "Item #1" }, + { __typename: "Item", id: 2, text: "Item #2" }, + { __typename: "Item", id: 5, text: "Item #5" }, + ], + }, + }); + } + + await rerender( + + ); + + { + const { renderedComponents, snapshot } = await takeRender(); + + expect(renderedComponents).toStrictEqual([ + "useSuspenseFragment 2", + "useSuspenseFragment 1", + ]); + expect(snapshot).toStrictEqual({ + items1: { + data: [ + { __typename: "Item", id: 1, text: "Item #1" }, + { __typename: "Item", id: 2, text: "Item #2" }, + ], + }, + items2: { + data: [{ __typename: "Item", id: 2, text: "Item #2" }], + }, + }); + } + await waitFor(() => expect(cache).toHaveNumWatches(3)); + + client.writeFragment({ + fragment, + data: { __typename: "Item", id: 2, text: "Item #2 updated" }, + }); + + { + const { renderedComponents, snapshot } = await takeRender(); + + expect(renderedComponents).toStrictEqual([ + "useSuspenseFragment 2", + "useSuspenseFragment 1", + ]); + expect(snapshot).toStrictEqual({ 
+ items1: { + data: [ + { __typename: "Item", id: 1, text: "Item #1" }, + { __typename: "Item", id: 2, text: "Item #2 updated" }, + ], + }, + items2: { + data: [{ __typename: "Item", id: 2, text: "Item #2 updated" }], + }, + }); + } + + await expect(takeRender).not.toRerender(); +}); + +test("works with transitions", async () => { + type Item = { + __typename: string; + id: number; + text?: string; + }; + + const fragment: TypedDocumentNode = gql` + fragment ItemFragment on Item { + id + text + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: ApolloLink.empty(), + }); + const user = userEvent.setup(); + + function UseSuspenseFragment({ items }: { items: StoreObject[] }) { + useTrackRenders({ name: "useSuspenseFragment" }); + replaceSnapshot(useSuspenseFragment({ fragment, from: items })); + + return null; + } + + function SuspenseFallback() { + useTrackRenders({ name: "SuspenseFallback" }); + + return null; + } + + function App() { + const [items, setItems] = React.useState([ + { __typename: "Item", id: 1 }, + { __typename: "Item", id: 2 }, + ]); + const [isPending, startTransition] = React.useTransition(); + + return ( + <> + + }> + + + + ); + } + + using _disabledAct = disableActEnvironment(); + const { render, takeRender, replaceSnapshot } = createRenderStream< + useSuspenseFragment.Result + >({ skipNonTrackingRenders: true }); + + await render(, { wrapper: createClientWrapper(client) }); + + { + const { renderedComponents } = await takeRender(); + + expect(renderedComponents).toStrictEqualTyped(["SuspenseFallback"]); + } + + client.writeFragment({ + fragment, + data: { __typename: "Item", id: 1, text: "Item #1" }, + }); + client.writeFragment({ + fragment, + data: { __typename: "Item", id: 2, text: "Item #2" }, + }); + + { + const { renderedComponents, snapshot } = await takeRender(); + + expect(renderedComponents).toStrictEqualTyped(["useSuspenseFragment"]); + expect(snapshot).toStrictEqualTyped({ + data: [ + { __typename: "Item", id: 1, text: "Item #1" }, + { __typename: "Item", id: 2, text: "Item #2" }, + ], + }); + } + + const button = screen.getByText("Change items"); + await user.click(button); + + { + const { renderedComponents, snapshot } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseFragment"]); + expect(snapshot).toStrictEqual({ + data: [ + { __typename: "Item", id: 1, text: "Item #1" }, + { __typename: "Item", id: 2, text: "Item #2" }, + ], + }); + expect(button).toBeDisabled(); + } + + client.writeFragment({ + fragment, + data: { __typename: "Item", id: 5, text: "Item #5" }, + }); + + { + const { renderedComponents, snapshot } = await takeRender(); + + expect(renderedComponents).toStrictEqual(["useSuspenseFragment"]); + expect(snapshot).toStrictEqual({ + data: [ + { __typename: "Item", id: 1, text: "Item #1" }, + { __typename: "Item", id: 2, text: "Item #2" }, + { __typename: "Item", id: 5, text: "Item #5" }, + ], + }); + expect(button).not.toBeDisabled(); + } + + await expect(takeRender).not.toRerender(); +}); diff --git a/src/react/hooks/useFragment.ts b/src/react/hooks/useFragment.ts index 94daa7c2799..e1d1fab0e42 100644 --- a/src/react/hooks/useFragment.ts +++ b/src/react/hooks/useFragment.ts @@ -1,4 +1,3 @@ -import { equal } from "@wry/equality"; import * as React from "react"; import type { @@ -9,13 +8,8 @@ import type { OperationVariables, TypedDocumentNode, } from "@apollo/client"; -import type { - Cache, - MissingTree, - Reference, - StoreObject, -} from "@apollo/client/cache"; -import type { 
FragmentType, MaybeMasked } from "@apollo/client/masking"; +import type { ApolloCache, MissingTree } from "@apollo/client/cache"; +import type { MaybeMasked } from "@apollo/client/masking"; import type { NoInfer } from "@apollo/client/utilities/internal"; import { useDeepMemo, wrapHook } from "./internal/index.js"; @@ -24,6 +18,7 @@ import { useSyncExternalStore } from "./useSyncExternalStore.js"; export declare namespace useFragment { import _self = useFragment; + export interface Options { /** * A GraphQL document created using the `gql` template string tag from @@ -46,14 +41,11 @@ export declare namespace useFragment { variables?: NoInfer; /** - * An object containing a `__typename` and primary key fields (such as `id`) identifying the entity object from which the fragment will be retrieved, or a `{ __ref: "..." }` reference, or a `string` ID (uncommon). + * An object or array containing a `__typename` and primary key fields + * (such as `id`) identifying the entity object from which the fragment will + * be retrieved, or a `{ __ref: "..." }` reference, or a `string` ID (uncommon). */ - from: - | StoreObject - | Reference - | FragmentType> - | string - | null; + from: useFragment.FromValue | Array>; /** * Whether to read from optimistic or non-optimistic cache data. If @@ -84,6 +76,11 @@ export declare namespace useFragment { } } + /** + * Acceptable values provided to the `from` option. + */ + export type FromValue = ApolloCache.WatchFragmentFromValue; + // TODO: Update this to return `null` when there is no data returned from the // fragment. export type Result = @@ -93,12 +90,18 @@ export declare namespace useFragment { /** {@inheritDoc @apollo/client/react!useFragment.DocumentationTypes.useFragment.Result#missing:member} */ missing?: never; } & GetDataState, "complete">) - | ({ + | { /** {@inheritDoc @apollo/client/react!useFragment.DocumentationTypes.useFragment.Result#complete:member} */ complete: false; /** {@inheritDoc @apollo/client/react!useFragment.DocumentationTypes.useFragment.Result#missing:member} */ missing?: MissingTree; - } & GetDataState, "partial">); + /** {@inheritDoc @apollo/client!QueryResultDocumentation#data:member} */ + data: TData extends Array ? 
+        Array | null>
+      : DataValue.Partial;
+      /** {@inheritDoc @apollo/client!QueryResultDocumentation#dataState:member} */
+      dataState: "partial";
+    };
 
   export namespace DocumentationTypes {
     namespace useFragment {
@@ -138,7 +141,44 @@ export declare namespace useFragment {
 export function useFragment<
   TData = unknown,
   TVariables extends OperationVariables = OperationVariables,
->(options: useFragment.Options): useFragment.Result {
+>(
+  options: useFragment.Options & {
+    from: Array>>;
+  }
+): useFragment.Result>;
+
+/** {@inheritDoc @apollo/client/react!useFragment:function(1)} */
+export function useFragment<
+  TData = unknown,
+  TVariables extends OperationVariables = OperationVariables,
+>(
+  options: useFragment.Options & {
+    from: Array;
+  }
+): useFragment.Result>;
+
+/** {@inheritDoc @apollo/client/react!useFragment:function(1)} */
+export function useFragment<
+  TData = unknown,
+  TVariables extends OperationVariables = OperationVariables,
+>(
+  options: useFragment.Options & {
+    from: Array>;
+  }
+): useFragment.Result>;
+
+/** {@inheritDoc @apollo/client/react!useFragment:function(1)} */
+export function useFragment<
+  TData = unknown,
+  TVariables extends OperationVariables = OperationVariables,
+>(options: useFragment.Options): useFragment.Result;
+
+export function useFragment<
+  TData = unknown,
+  TVariables extends OperationVariables = OperationVariables,
+>(
+  options: useFragment.Options
+): useFragment.Result | useFragment.Result> {
   "use no memo";
   return wrapHook(
     "useFragment",
@@ -150,115 +190,75 @@ export function useFragment<
 
 function useFragment_(
   options: useFragment.Options
-): useFragment.Result {
+): useFragment.Result | useFragment.Result> {
   const client = useApolloClient(options.client);
-  const { cache } = client;
   const { from, ...rest } = options;
+  const { cache } = client;
 
-  // We calculate the cache id seperately from `stableOptions` because we don't
-  // want changes to non key fields in the `from` property to affect
-  // `stableOptions` and retrigger our subscription. If the cache identifier
-  // stays the same between renders, we want to reuse the existing subscription.
-  const id = React.useMemo(
-    () =>
-      typeof from === "string" ? from
-      : from === null ? null
-      : cache.identify(from),
-    [cache, from]
-  );
+  // We calculate the cache id separately because we don't want changes to non
+  // key fields in the `from` property to recreate the observable. If the cache
+  // identifier stays the same between renders, we want to reuse the existing
+  // subscription.
+  const ids = useDeepMemo(() => {
+    const fromArray = Array.isArray(from) ? from : [from];
 
-  const stableOptions = useDeepMemo(() => ({ ...rest, from: id! }), [rest, id]);
+    const ids = fromArray.map((value) =>
+      typeof value === "string" ? value
+      : value === null ? null
+      : cache.identify(value)
+    );
 
-  // Since .next is async, we need to make sure that we
-  // get the correct diff on the next render given new diffOptions
-  const diff = React.useMemo(() => {
-    const { fragment, fragmentName, from, optimistic = true } = stableOptions;
+    return Array.isArray(from) ? 
ids : ids[0]; + }, [cache, from]); - if (from === null) { - return { - result: diffToResult({ - result: {}, - complete: false, - } as Cache.DiffResult), - }; - } - - const { cache } = client; - const diff = cache.diff({ - ...stableOptions, - returnPartialData: true, - id: from, - query: cache["getFragmentDoc"]( - client["transform"](fragment), - fragmentName - ), - optimistic, - }); + const stableOptions = useDeepMemo( + () => ({ ...rest, from: ids as any }), + [rest, ids] + ); - return { - result: diffToResult({ - ...diff, - result: client["queryManager"].maskFragment({ - fragment, - fragmentName, - // TODO: Revert to `diff.result` once `useFragment` supports `null` as - // valid return value - data: diff.result === null ? {} : diff.result, - }) as any, - }), - }; - }, [client, stableOptions]); + const observable = React.useMemo( + () => client.watchFragment(stableOptions), + [client, stableOptions] + ); - // Used for both getSnapshot and getServerSnapshot - const getSnapshot = React.useCallback(() => diff.result, [diff]); + // Unfortunately we forgot to update the use case of `from: null` on + // useFragment in 4.0 to match `useSuspenseFragment`. As such, we need to + // fallback to data: {} with complete: false when `from` is `null` to maintain + // backwards compatibility. We should plan to change this in v5. + const getSnapshot = React.useCallback( + () => (from === null ? nullResult : observable.getCurrentResult()), + [from, observable] + ); return useSyncExternalStore( React.useCallback( - (forceUpdate) => { + (update) => { let lastTimeout = 0; + const subscription = observable.subscribe({ + next: () => { + // If we get another update before we've re-rendered, bail out of + // the update and try again. This ensures that the relative timing + // between useQuery and useFragment stays roughly the same as + // fixed in https://github.com/apollographql/apollo-client/pull/11083 + clearTimeout(lastTimeout); + lastTimeout = setTimeout(update) as any; + }, + }); - const subscription = - stableOptions.from === null ? - null - : client.watchFragment(stableOptions).subscribe({ - next: (result) => { - // Avoid unnecessarily rerendering this hook for the initial result - // emitted from watchFragment which should be equal to - // `diff.result`. - if (equal(result, diff.result)) return; - diff.result = result; - // If we get another update before we've re-rendered, bail out of - // the update and try again. This ensures that the relative timing - // between useQuery and useFragment stays roughly the same as - // fixed in https://github.com/apollographql/apollo-client/pull/11083 - clearTimeout(lastTimeout); - lastTimeout = setTimeout(forceUpdate) as any; - }, - }); return () => { - subscription?.unsubscribe(); + subscription.unsubscribe(); clearTimeout(lastTimeout); }; }, - [client, stableOptions, diff] + [observable] ), getSnapshot, getSnapshot ); } -function diffToResult( - diff: Cache.DiffResult -): useFragment.Result { - const result = { - data: diff.result, - complete: !!diff.complete, - dataState: diff.complete ? 
"complete" : "partial", - } as useFragment.Result; // TODO: Remove assertion once useFragment returns null - - if (diff.missing) { - result.missing = diff.missing.missing; - } - - return result; -} +const nullResult = Object.freeze({ + data: {}, + dataState: "partial", + complete: false, +}) as useFragment.Result; diff --git a/src/react/hooks/useSuspenseFragment.ts b/src/react/hooks/useSuspenseFragment.ts index de6e0950858..6ce9ab0decd 100644 --- a/src/react/hooks/useSuspenseFragment.ts +++ b/src/react/hooks/useSuspenseFragment.ts @@ -5,12 +5,11 @@ import type { DataValue, DocumentNode, OperationVariables, - Reference, - StoreObject, TypedDocumentNode, } from "@apollo/client"; +import type { ApolloCache } from "@apollo/client/cache"; import { canonicalStringify } from "@apollo/client/cache"; -import type { FragmentType, MaybeMasked } from "@apollo/client/masking"; +import type { MaybeMasked } from "@apollo/client/masking"; import type { FragmentKey } from "@apollo/client/react/internal"; import { getSuspenseCache } from "@apollo/client/react/internal"; import type { @@ -20,16 +19,9 @@ import type { } from "@apollo/client/utilities/internal"; import { __use } from "./internal/__use.js"; -import { wrapHook } from "./internal/index.js"; +import { useDeepMemo, wrapHook } from "./internal/index.js"; import { useApolloClient } from "./useApolloClient.js"; -type From = - | StoreObject - | Reference - | FragmentType> - | string - | null; - export declare namespace useSuspenseFragment { import _self = useSuspenseFragment; export namespace Base { @@ -48,7 +40,16 @@ export declare namespace useSuspenseFragment { * `fragment` document then that fragment will be used. */ fragmentName?: string; - from: From; + + /** + * An object or array containing a `__typename` and primary key fields + * (such as `id`) identifying the entity object from which the fragment will + * be retrieved, or a `{ __ref: "..." }` reference, or a `string` ID (uncommon). + */ + from: + | useSuspenseFragment.FromValue + | Array>; + // Override this field to make it optional (default: true). optimistic?: boolean; /** @@ -77,6 +78,11 @@ export declare namespace useSuspenseFragment { } } + /** + * Acceptable values provided to the `from` option. 
+ */ + export type FromValue = ApolloCache.WatchFragmentFromValue; + export interface Result { data: DataValue.Complete>; } @@ -97,18 +103,43 @@ export declare namespace useSuspenseFragment { } } -const NULL_PLACEHOLDER = [] as unknown as [ - FragmentKey, - Promise | null>, -]; - /** #TODO documentation */ +export function useSuspenseFragment< + TData = unknown, + TVariables extends OperationVariables = OperationVariables, +>( + options: useSuspenseFragment.Options & { + from: Array>>; + } +): useSuspenseFragment.Result>; + +/** {@inheritDoc @apollo/client/react!useSuspenseFragment:function(1)} */ +export function useSuspenseFragment< + TData = unknown, + TVariables extends OperationVariables = OperationVariables, +>( + options: useSuspenseFragment.Options & { + from: Array; + } +): useSuspenseFragment.Result>; + +/** {@inheritDoc @apollo/client/react!useSuspenseFragment:function(1)} */ +export function useSuspenseFragment< + TData = unknown, + TVariables extends OperationVariables = OperationVariables, +>( + options: useSuspenseFragment.Options & { + from: Array>; + } +): useSuspenseFragment.Result>; + +/** {@inheritDoc @apollo/client/react!useSuspenseFragment:function(1)} */ export function useSuspenseFragment< TData, TVariables extends OperationVariables = OperationVariables, >( options: useSuspenseFragment.Options & { - from: NonNullable>; + from: NonNullable>; } ): useSuspenseFragment.Result; @@ -128,7 +159,7 @@ export function useSuspenseFragment< TVariables extends OperationVariables = OperationVariables, >( options: useSuspenseFragment.Options & { - from: From; + from: useSuspenseFragment.FromValue; } ): useSuspenseFragment.Result; @@ -165,36 +196,27 @@ function useSuspenseFragment_< const { from, variables } = options; const { cache } = client; - const id = React.useMemo( - () => - typeof from === "string" ? from - : from === null ? null - : cache.identify(from), - [cache, from] - ) as string | null; - - const fragmentRef = - id === null ? null : ( - getSuspenseCache(client).getFragmentRef( - [id, options.fragment, canonicalStringify(variables)], - client, - { ...options, variables: variables as TVariables, from: id } - ) - ); + const ids = useDeepMemo(() => { + return Array.isArray(from) ? + from.map((id) => toStringId(cache, id)) + : toStringId(cache, from); + }, [cache, from]); + const idString = React.useMemo( + () => (Array.isArray(ids) ? ids.join(",") : ids), + [ids] + ); + + const fragmentRef = getSuspenseCache(client).getFragmentRef( + [options.fragment, canonicalStringify(variables), idString], + client, + { ...options, variables: variables as TVariables, from: ids } + ); let [current, setPromise] = React.useState< [FragmentKey, Promise | null>] - >( - fragmentRef === null ? NULL_PLACEHOLDER : ( - [fragmentRef.key, fragmentRef.promise] - ) - ); + >([fragmentRef.key, fragmentRef.promise]); React.useEffect(() => { - if (fragmentRef === null) { - return; - } - const dispose = fragmentRef.retain(); const removeListener = fragmentRef.listen((promise) => { setPromise([fragmentRef.key, promise]); @@ -206,10 +228,6 @@ function useSuspenseFragment_< }; }, [fragmentRef]); - if (fragmentRef === null) { - return { data: null }; - } - if (current[0] !== fragmentRef.key) { // eslint-disable-next-line react-compiler/react-compiler current[0] = fragmentRef.key; @@ -220,3 +238,13 @@ function useSuspenseFragment_< return { data }; } + +function toStringId( + cache: ApolloCache, + from: useSuspenseFragment.FromValue +) { + return ( + typeof from === "string" ? from + : from === null ? 
null + : cache.identify(from)) as string | null; +} diff --git a/src/react/internal/cache/FragmentReference.ts b/src/react/internal/cache/FragmentReference.ts index f8d10e5bff5..1fa8135ba68 100644 --- a/src/react/internal/cache/FragmentReference.ts +++ b/src/react/internal/cache/FragmentReference.ts @@ -1,5 +1,5 @@ import { equal } from "@wry/equality"; -import type { Observable, Subscription } from "rxjs"; +import type { Subscription } from "rxjs"; import type { ApolloClient, OperationVariables } from "@apollo/client"; import type { MaybeMasked } from "@apollo/client/masking"; @@ -23,9 +23,7 @@ export class FragmentReference< TData = unknown, TVariables extends OperationVariables = OperationVariables, > { - public readonly observable: Observable< - ApolloClient.WatchFragmentResult - >; + public readonly observable: ApolloClient.ObservableFragment; public readonly key: FragmentKey = {}; public promise!: FragmentRefPromise>; @@ -44,7 +42,7 @@ export class FragmentReference< TData, TVariables > & { - from: string; + from: string | null | Array; }, options: FragmentReferenceOptions ) { @@ -58,7 +56,7 @@ export class FragmentReference< this.onDispose = options.onDispose; } - const diff = this.getDiff(client, watchFragmentOptions); + const result = this.observable.getCurrentResult(); // Start a timer that will automatically dispose of the query if the // suspended resource does not use this fragmentRef in the given time. This @@ -74,8 +72,8 @@ export class FragmentReference< }; this.promise = - diff.complete ? - createFulfilledPromise(diff.result) + result.complete ? + createFulfilledPromise(result.data) : this.createPendingPromise(); this.subscribeToFragment(); @@ -130,7 +128,7 @@ export class FragmentReference< this.subscription.add(this.onDispose); } - private handleNext(result: ApolloClient.WatchFragmentResult) { + private handleNext(result: ApolloClient.WatchFragmentResult) { switch (this.promise.status) { case "pending": { if (result.complete) { @@ -175,34 +173,4 @@ export class FragmentReference< }) ); } - - private getDiff( - client: ApolloClient, - options: ApolloClient.WatchFragmentOptions & { - from: string; - } - ) { - const { cache } = client; - const { from, fragment, fragmentName } = options; - - const diff = cache.diff({ - ...options, - query: cache["getFragmentDoc"]( - client["transform"](fragment), - fragmentName - ), - returnPartialData: true, - id: from, - optimistic: true, - }); - - return { - ...diff, - result: client["queryManager"].maskFragment({ - fragment, - fragmentName, - data: diff.result, - }) as MaybeMasked, - }; - } } diff --git a/src/react/internal/cache/SuspenseCache.ts b/src/react/internal/cache/SuspenseCache.ts index b42aa22cfb7..4e7ea9332f2 100644 --- a/src/react/internal/cache/SuspenseCache.ts +++ b/src/react/internal/cache/SuspenseCache.ts @@ -60,7 +60,7 @@ export class SuspenseCache { cacheKey: FragmentCacheKey, client: ApolloClient, options: ApolloClient.WatchFragmentOptions & { - from: string; + from: string | null | Array; } ) { const ref = this.fragmentRefs.lookupArray(cacheKey) as { diff --git a/src/react/internal/cache/types.ts b/src/react/internal/cache/types.ts index a163431ad9d..2aba1e401e6 100644 --- a/src/react/internal/cache/types.ts +++ b/src/react/internal/cache/types.ts @@ -7,9 +7,9 @@ export type CacheKey = [ ]; export type FragmentCacheKey = [ - cacheId: string, fragment: DocumentNode, stringifiedVariables: string, + cacheId: string | null, ]; export interface QueryKey { diff --git a/src/testing/matchers/index.d.ts 
b/src/testing/matchers/index.d.ts index e0b73018afd..70127ddce9f 100644 --- a/src/testing/matchers/index.d.ts +++ b/src/testing/matchers/index.d.ts @@ -8,6 +8,7 @@ import type { MatcherHintOptions } from "jest-matcher-utils"; import type { ApolloClient, DocumentNode, + InMemoryCache, ObservableQuery, OperationVariables, } from "@apollo/client"; @@ -17,6 +18,7 @@ import type { ObservableStream } from "../internal/index.js"; import { NextRenderOptions } from "../internal/index.js"; import type { TakeOptions } from "../internal/ObservableStream.js"; +import type { KeyOptions } from "./toHaveFragmentWatches.ts"; import type { CommonStream, ToEmitSimilarValueOptions, @@ -56,6 +58,13 @@ interface ApolloCustomMatchers { */ toMatchDocument(document: DocumentNode): R; + toHaveFragmentWatchesOn: T extends ApolloClient ? + (fragment: DocumentNode, keyOptions: Array) => R + : { error: "matcher needs to be called on an ApolloClient instance" }; + + toHaveNumWatches: T extends InMemoryCache ? (size: number) => R + : { error: "matcher needs to be called on an InMemoryCache instance" }; + /** * Used to determine if the Suspense cache has a cache entry. */ diff --git a/src/testing/matchers/index.ts b/src/testing/matchers/index.ts index 0ba1ebc38b2..6c79a045ca3 100644 --- a/src/testing/matchers/index.ts +++ b/src/testing/matchers/index.ts @@ -8,6 +8,8 @@ import { toEmitAnything } from "./toEmitAnything.js"; import { toEmitError } from "./toEmitError.js"; import { toEmitNext } from "./toEmitNext.js"; import { toEmitTypedValue } from "./toEmitTypedValue.js"; +import { toHaveFragmentWatchesOn } from "./toHaveFragmentWatchesOn.js"; +import { toHaveNumWatches } from "./toHaveNumWatches.js"; import { toHaveSuspenseCacheEntryUsing } from "./toHaveSuspenseCacheEntryUsing.js"; import { toMatchDocument } from "./toMatchDocument.js"; import { @@ -24,6 +26,8 @@ expect.extend({ toEmitNext, toEmitTypedValue, toBeDisposed, + toHaveFragmentWatchesOn, + toHaveNumWatches, toHaveSuspenseCacheEntryUsing, toMatchDocument, toBeGarbageCollected, diff --git a/src/testing/matchers/toHaveFragmentWatchesOn.ts b/src/testing/matchers/toHaveFragmentWatchesOn.ts new file mode 100644 index 00000000000..6ffb7c6e49a --- /dev/null +++ b/src/testing/matchers/toHaveFragmentWatchesOn.ts @@ -0,0 +1,99 @@ +import { iterableEquality } from "@jest/expect-utils"; +import type { Trie } from "@wry/trie"; +import type { MatcherFunction } from "expect"; + +import type { + ApolloClient, + Cache, + DocumentNode, + InMemoryCache, +} from "@apollo/client"; + +export type KeyOptions = Pick< + Cache.WatchOptions, + "id" | "optimistic" | "variables" +>; + +export const toHaveFragmentWatchesOn: MatcherFunction< + [fragment: DocumentNode, keyOptions: Array] +> = function (_client, fragment, keyOptions) { + const hint = this.utils.matcherHint( + "toHaveFragmentWatches", + "client", + "keyOptions", + { + isNot: this.isNot, + } + ); + const client = _client as ApolloClient; + const cache = client.cache as InMemoryCache; + + function getFragmentWatches() { + // testing implementation detail to ensure cache.fragmentWatches also cleans up + const watchedItems: Trie | undefined = cache["fragmentWatches"][ + "weak" + ].get( + client.cache["getFragmentDoc"]( + client["transform"](fragment, true), + undefined + ) + ); + function* iterateStrongTrieChildren( + trie: Trie | undefined, + path: any[] + ): Generator { + if (!trie) return; + if (trie["data"]) { + yield path; + } + if (trie["strong"]) { + for (const [key, value] of Array.from( + (trie["strong"] as Map | 
undefined>)?.entries()
+        )) {
+          yield* iterateStrongTrieChildren(value, path.concat(key));
+        }
+      }
+    }
+
+    return Array.from(iterateStrongTrieChildren(watchedItems, []));
+  }
+
+  const watches = getFragmentWatches().map((cacheKey) => {
+    if (cacheKey.length > 1) {
+      throw new Error(
+        "The `watchFragment` watcher cache key has changed. Please update the toHaveFragmentWatchesOn matcher."
+      );
+    }
+
+    return JSON.parse(cacheKey[0]);
+  });
+
+  const pass = this.equals(watches, keyOptions, [
+    ...this.customTesters,
+    iterableEquality,
+  ]);
+
+  return {
+    pass,
+    message: () => {
+      if (pass) {
+        return (
+          hint +
+          "\n\nExpected client not to have fragment watches equal to expected but it did."
+        );
+      }
+
+      return (
+        hint +
+        "\n\n" +
+        this.utils.printDiffOrStringify(
+          keyOptions,
+          watches,
+          "Expected",
+          "Received",
+          true
+        )
+      );
+    },
+  };
+};
diff --git a/src/testing/matchers/toHaveNumWatches.ts b/src/testing/matchers/toHaveNumWatches.ts
new file mode 100644
index 00000000000..fd502f9e21c
--- /dev/null
+++ b/src/testing/matchers/toHaveNumWatches.ts
@@ -0,0 +1,35 @@
+import type { MatcherFunction } from "expect";
+
+import type { InMemoryCache } from "@apollo/client";
+
+export const toHaveNumWatches: MatcherFunction<[size: number]> = function (
+  _cache,
+  size
+) {
+  const hint = this.utils.matcherHint("toHaveNumWatches", "cache", "size", {
+    isNot: this.isNot,
+  });
+  const cache = _cache as InMemoryCache;
+  const watchSize = cache["watches"].size;
+  const watchIds = Array.from(cache["watches"].values()).map(
+    (watch) => `'${watch.id ?? "ROOT_QUERY"}'`
+  );
+  const pass = watchSize === size;
+
+  const plural = (size: number) => (size === 1 ? "watch" : "watches");
+
+  return {
+    pass,
+    message: () => {
+      return `${hint}\n\nExpected cache ${
+        this.isNot ? "not " : ""
+      }to have ${this.utils.printExpected(size)} ${plural(
+        size
+      )} but instead it had ${this.utils.printReceived(watchSize)} ${plural(
+        watchSize
+      )}.\n\nWatches: ${this.utils.printReceived(
+        "[" + watchIds.join(", ") + "]"
+      )}`;
+    },
+  };
+};
diff --git a/src/utilities/DeepPartial.ts b/src/utilities/DeepPartial.ts
index 0ed92959eea..7fcb8d87b7e 100644
--- a/src/utilities/DeepPartial.ts
+++ b/src/utilities/DeepPartial.ts
@@ -37,8 +37,8 @@ export type DeepPartial<T> = T
   // Test for non-tuples
   ) ?
     readonly TItem[] extends T ?
-      ReadonlyArray<DeepPartial<TItem | undefined>>
-      : Array<DeepPartial<TItem | undefined>>
+      ReadonlyArray<DeepPartial<TItem>>
+      : Array<DeepPartial<TItem>>
     : DeepPartialObject<T>
  : DeepPartialObject<T>
   : unknown;
diff --git a/src/utilities/internal/combineLatestBatched.ts b/src/utilities/internal/combineLatestBatched.ts
new file mode 100644
index 00000000000..c00ec720bb0
--- /dev/null
+++ b/src/utilities/internal/combineLatestBatched.ts
@@ -0,0 +1,80 @@
+import { EMPTY, Observable } from "rxjs";
+
+/**
+ * Like `combineLatest` but with some differences:
+ *
+ * - It only works on arrays as an input
+ * - Batches updates to each array index that contains a referentially equal
+ *   observable
+ * - Doesn't allow for custom scheduler
+ * - Expects array of constructed observables instead of `Array`
+ */
+export function combineLatestBatched<T>(
+  observables: Array<Observable<T> & { dirty?: boolean }>
+) {
+  if (observables.length === 0) {
+    return EMPTY;
+  }
+
+  return new Observable<Array<T>>((observer) => {
+    const { length } = observables;
+    // Keeps track of current values for each observable
+    const values: T[] = new Array(length);
+    // Used to batch updates for items in the array that share an observable
+    // so that they can be emitted together. 
+ const indexesByObservable = new Map, Set>(); + + observables.forEach((source, idx) => { + if (!indexesByObservable.has(source)) { + indexesByObservable.set(source, new Set()); + } + + indexesByObservable.get(source)!.add(idx); + }); + + // Track the number of active subscriptions so we know when to complete this + // observable + let active = indexesByObservable.size; + // Track how many observables are left to emit their first value + let remainingFirstValues = indexesByObservable.size; + + let currentBatch: Set> | undefined; + + // Subscribe to each unique observable instead of the raw source array of + // observables since we want at most 1-subscription per unique observable. + // This ensures an update can write to multiple indexes before emitting the + // result. + indexesByObservable.forEach((indexes, source) => { + let hasFirstValue = false; + const subscription = source.subscribe({ + next: (value) => { + indexes.forEach((idx) => (values[idx] = value)); + + if (!hasFirstValue) { + hasFirstValue = true; + remainingFirstValues--; + } + + if (!remainingFirstValues) { + currentBatch ||= new Set(observables.filter((obs) => obs.dirty)); + currentBatch.delete(source); + if (!currentBatch.size) { + observer.next(values.slice()); + currentBatch = undefined; + } + } + }, + complete: () => { + active--; + + if (!active) { + observer.complete(); + } + }, + error: observer.error.bind(observer), + }); + + observer.add(subscription); + }); + }); +} diff --git a/src/utilities/internal/index.ts b/src/utilities/internal/index.ts index 47add2520ec..e551ad4fb0d 100644 --- a/src/utilities/internal/index.ts +++ b/src/utilities/internal/index.ts @@ -17,6 +17,7 @@ export { argumentsObjectFromField } from "./argumentsObjectFromField.js"; export { canUseDOM } from "./canUseDOM.js"; export { checkDocument } from "./checkDocument.js"; export { cloneDeep } from "./cloneDeep.js"; +export { combineLatestBatched } from "./combineLatestBatched.js"; export { compact } from "./compact.js"; export { createFragmentMap } from "./createFragmentMap.js"; export { createFulfilledPromise } from "./createFulfilledPromise.js"; From 00e90544b54a202cd4b3979f6f3f25436be59da2 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 27 Oct 2025 09:54:29 -0600 Subject: [PATCH 254/254] Version Packages (alpha) (#12977) Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- .changeset/pre.json | 8 +++++++ CHANGELOG.md | 56 +++++++++++++++++++++++++++++++++++++++++++++ package-lock.json | 4 ++-- package.json | 2 +- 4 files changed, 67 insertions(+), 3 deletions(-) diff --git a/.changeset/pre.json b/.changeset/pre.json index 73dac31dce9..2eab7992cad 100644 --- a/.changeset/pre.json +++ b/.changeset/pre.json @@ -9,15 +9,23 @@ "changesets": [ "big-flowers-move", "cold-kiwis-give", + "famous-hats-explode", "flat-worms-notice", "funny-bats-hammer", + "large-ligers-prove", "little-yaks-decide", "neat-lemons-shave", + "neat-windows-compete", + "old-singers-eat", "olive-queens-fold", "perfect-crabs-smile", + "poor-knives-smile", "popular-files-glow", + "shaggy-brooms-talk", "shaggy-islands-yell", "six-islands-drum", + "slimy-ducks-scream", + "spicy-eels-switch", "unlucky-cooks-rhyme" ] } diff --git a/CHANGELOG.md b/CHANGELOG.md index e056190bf04..59301a4c344 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,61 @@ # @apollo/client +## 4.1.0-alpha.3 + +### Minor Changes + +- 
[#12971](https://github.com/apollographql/apollo-client/pull/12971) [`d11eb40`](https://github.com/apollographql/apollo-client/commit/d11eb40aa41d90ac664705bac01158d58bf55e9b) Thanks [@jerelmiller](https://github.com/jerelmiller)! - Add support for `from: null` in `client.watchFragment` and `cache.watchFragment`. When `from` is `null`, the emitted result is: + + ```ts + { + data: null, + dataState: "complete", + complete: true, + } + ``` + +- [#12971](https://github.com/apollographql/apollo-client/pull/12971) [`d11eb40`](https://github.com/apollographql/apollo-client/commit/d11eb40aa41d90ac664705bac01158d58bf55e9b) Thanks [@jerelmiller](https://github.com/jerelmiller)! - Add support for arrays with `useFragment`, `useSuspenseFragment`, and `client.watchFragment`. This allows the ability to use a fragment to watch multiple entities in the cache. Passing an array to `from` will return `data` as an array where each array index corresponds to the index in the `from` array. + + ```ts + function MyComponent() { + const result = useFragment({ + fragment, + from: [item1, item2, item3], + }); + + // `data` is an array with 3 items + console.log(result); // { data: [{...}, {...}, {...}], dataState: "complete", complete: true } + } + ``` + +- [#12971](https://github.com/apollographql/apollo-client/pull/12971) [`d11eb40`](https://github.com/apollographql/apollo-client/commit/d11eb40aa41d90ac664705bac01158d58bf55e9b) Thanks [@jerelmiller](https://github.com/jerelmiller)! - Add a `getCurrentResult` function to the observable returned by `client.watchFragment` and `cache.watchFragment` that returns the current value for the watched fragment. + + ```ts + const observable = client.watchFragment({ + fragment, + from: { __typename: "Item", id: 1 }, + }); + + console.log(observable.getCurrentResult()); + // { + // data: {...}, + // dataState: "complete", + // complete: true, + // } + ``` + +### Patch Changes + +- [#12971](https://github.com/apollographql/apollo-client/pull/12971) [`d11eb40`](https://github.com/apollographql/apollo-client/commit/d11eb40aa41d90ac664705bac01158d58bf55e9b) Thanks [@jerelmiller](https://github.com/jerelmiller)! - Deduplicate watches created by `useFragment`, `client.watchFragment`, and `cache.watchFragment` that contain the same fragment, variables, and identifier. This should improve performance in situations where a `useFragment` or a `client.watchFragment` is used to watch the same object in multiple places of an application. + +- [#12982](https://github.com/apollographql/apollo-client/pull/12982) [`5c56b32`](https://github.com/apollographql/apollo-client/commit/5c56b3210a2c03e247ec9e600f1e27eb71df5e96) Thanks [@jerelmiller](https://github.com/jerelmiller)! - Ignore top-level `data` values on subsequent chunks in incremental responses. + +- [#12982](https://github.com/apollographql/apollo-client/pull/12982) [`5c56b32`](https://github.com/apollographql/apollo-client/commit/5c56b3210a2c03e247ec9e600f1e27eb71df5e96) Thanks [@jerelmiller](https://github.com/jerelmiller)! - Fix the `Defer20220824Handler.SubsequentResult` type to match the `FormattedSubsequentIncrementalExecutionResult` type in `graphql@17.0.0-alpha.2`. + +- [#12973](https://github.com/apollographql/apollo-client/pull/12973) [`072da24`](https://github.com/apollographql/apollo-client/commit/072da24a8daec3a646ef0cce30de32f95ea0bb23) Thanks [@jerelmiller](https://github.com/jerelmiller)! 
- Update the `accept` header used with the `GraphQL17Alpha9Handler` to `multipart/mixed;incrementalSpec=v0.2` to ensure the newest incremental delivery format is requested.
+
+- [#12971](https://github.com/apollographql/apollo-client/pull/12971) [`d11eb40`](https://github.com/apollographql/apollo-client/commit/d11eb40aa41d90ac664705bac01158d58bf55e9b) Thanks [@jerelmiller](https://github.com/jerelmiller)! - `DeepPartial<Array<TItem>>` now returns `Array<DeepPartial<TItem>>` instead of `Array<DeepPartial<TItem> | undefined>`.
+
 ## 4.1.0-alpha.2
 
 ### Minor Changes
diff --git a/package-lock.json b/package-lock.json
index 5ca228932a7..0f623598eaa 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,12 +1,12 @@
 {
   "name": "@apollo/client",
-  "version": "4.1.0-alpha.2",
+  "version": "4.1.0-alpha.3",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "@apollo/client",
-      "version": "4.1.0-alpha.2",
+      "version": "4.1.0-alpha.3",
       "hasInstallScript": true,
       "license": "MIT",
       "workspaces": [
diff --git a/package.json b/package.json
index 37d59341ada..583f4d4c69a 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@apollo/client",
-  "version": "4.1.0-alpha.2",
+  "version": "4.1.0-alpha.3",
   "description": "A fully-featured caching GraphQL client.",
   "private": true,
   "keywords": [