Skip to content
Draft
Show file tree
Hide file tree
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
163 changes: 162 additions & 1 deletion src/__tests__/fetchMore.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import { gql } from "graphql-tag";
import { assign, cloneDeep } from "lodash";
import { Observable } from "rxjs";
import { delay, Observable, of } from "rxjs";

import type { TypedDocumentNode } from "@apollo/client";
import {
Expand All @@ -22,6 +22,7 @@ import {
mockDeferStream,
ObservableStream,
setupPaginatedCase,
withCacheSizes,
} from "@apollo/client/testing/internal";
import {
concatPagination,
Expand Down Expand Up @@ -2686,6 +2687,166 @@ test("does not allow fetchMore on a cache-only query", async () => {
await expect(stream).not.toEmitAnything();
});

// https://github.com/apollographql/apollo-client/issues/12932
test("emits final result from fetchMore when executeSubSelectedArray cache is full", async () => {
using _ = withCacheSizes({ "inMemoryCache.executeSubSelectedArray": 10 });

const itemData = Array(20)
.fill(undefined)
.map((_, index) => ({
__typename: "Item" as const,
id: index.toString(),
attributes: ["data"],
}));
type GetCommentsData = {
items: Array<{
__typename: "Item";
id: string;
attributes: string[];
}>;
};

type GetCommentsVariables = {
offset: number;
limit: number;
};

const query: TypedDocumentNode<GetCommentsData, GetCommentsVariables> = gql`
query GetComments($offset: Int!, $limit: Int!) {
items(offset: $offset, limit: $limit) {
id
attributes
}
}
`;

const client = new ApolloClient({
cache: new InMemoryCache({
typePolicies: {
Query: {
fields: {
items: offsetLimitPagination(),
},
},
},
}),
link: new ApolloLink((operation) => {
const { offset, limit } = operation.variables;

return of({
data: {
items: itemData.slice(offset, offset + limit),
},
}).pipe(delay(10));
}),
});

const observable = client.watchQuery({
query,
variables: { offset: 0, limit: 10 },
});
const stream = new ObservableStream(observable);

await expect(stream).toEmitTypedValue({
data: undefined,
dataState: "empty",
loading: true,
networkStatus: NetworkStatus.loading,
partial: true,
});

await expect(stream).toEmitTypedValue({
data: { items: itemData.slice(0, 10) },
dataState: "complete",
loading: false,
networkStatus: NetworkStatus.ready,
partial: false,
});

await expect(
observable.fetchMore({
variables: {
offset: stream.getCurrent()?.data?.items?.length ?? 0,
limit: 5,
},
})
).resolves.toStrictEqualTyped({
data: { items: itemData.slice(10, 15) },
});

await expect(stream).toEmitTypedValue({
data: { items: itemData.slice(0, 10) },
dataState: "complete",
loading: true,
networkStatus: NetworkStatus.fetchMore,
partial: false,
});

await expect(stream).toEmitTypedValue({
data: { items: itemData.slice(0, 15) },
dataState: "complete",
loading: false,
networkStatus: NetworkStatus.ready,
partial: false,
});

await expect(
observable.fetchMore({
variables: {
offset: stream.getCurrent()?.data?.items?.length ?? 0,
limit: 5,
},
})
).resolves.toStrictEqualTyped({
data: { items: itemData.slice(15, 20) },
});

await expect(stream).toEmitTypedValue({
data: { items: itemData.slice(0, 15) },
dataState: "complete",
loading: true,
networkStatus: NetworkStatus.fetchMore,
partial: false,
});

await expect(stream).toEmitTypedValue({
data: { items: itemData.slice(0, 20) },
dataState: "complete",
loading: false,
networkStatus: NetworkStatus.ready,
partial: false,
});

await expect(
observable.fetchMore({
variables: {
offset: stream.getCurrent()?.data?.items?.length ?? 0,
limit: 5,
},
})
).resolves.toStrictEqualTyped({
data: { items: [] },
});

await expect(stream).toEmitTypedValue({
data: { items: itemData.slice(0, 20) },
dataState: "complete",
loading: true,
networkStatus: NetworkStatus.fetchMore,
partial: false,
});

await expect(stream).toEmitTypedValue({
Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This is where the test fails as this value is never emitted. It seems the bug only occurs when the cache limit is full and the result from the network returns an empty array. If this fetchMore were to return items, this emit would happen as usual.

data: { items: itemData.slice(0, 20) },
dataState: "complete",
loading: false,
networkStatus: NetworkStatus.ready,
partial: false,
});

await expect(stream).not.toEmitAnything();
});

function commentsInRange(
start: number,
end: number,
Expand Down
1 change: 1 addition & 0 deletions src/testing/internal/disposables/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,3 +2,4 @@ export { spyOnConsole } from "./spyOnConsole.js";
export { withCleanup } from "./withCleanup.js";
export { enableFakeTimers } from "./enableFakeTimers.js";
export { withProdMode } from "./withProdMode.js";
export { withCacheSizes } from "./withCacheSizes.js";
23 changes: 23 additions & 0 deletions src/testing/internal/disposables/withCacheSizes.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
import type { CacheSizes } from "@apollo/client/utilities";
import { cacheSizes } from "@apollo/client/utilities";

import { withCleanup } from "./withCleanup.js";

/**
 * Temporarily overrides entries of the global `cacheSizes` configuration for
 * the duration of a `using` scope.
 *
 * On dispose, each overridden key is restored to its previous value; keys
 * that were not present before the override are deleted again.
 *
 * @param tempCacheSizes - The cache-size entries to apply temporarily.
 * @returns A disposable (via `withCleanup`) that undoes the overrides.
 */
export function withCacheSizes(tempCacheSizes: Partial<CacheSizes>) {
  // Snapshot the current sizes before mutating anything.
  const snapshot = { prevCacheSizes: { ...cacheSizes } };

  for (const [name, size] of Object.entries(tempCacheSizes)) {
    cacheSizes[name as keyof CacheSizes] = size;
  }

  return withCleanup(snapshot, ({ prevCacheSizes }) => {
    for (const name of Object.keys(tempCacheSizes)) {
      const key = name as keyof CacheSizes;
      if (key in prevCacheSizes) {
        // Key existed before the override: restore its original value.
        cacheSizes[key] = prevCacheSizes[key];
      } else {
        // Key was introduced by the override: remove it entirely.
        delete cacheSizes[key];
      }
    }
  });
}
1 change: 1 addition & 0 deletions src/testing/internal/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ export {
spyOnConsole,
withCleanup,
withProdMode,
withCacheSizes,
} from "./disposables/index.js";
export { ObservableStream } from "./ObservableStream.js";

Expand Down
Loading