@@ -6,6 +6,7 @@ import { InMemoryCache } from "@apollo/client/cache";
 import { Defer20220824Handler } from "@apollo/client/incremental";
 import { ApolloLink } from "@apollo/client/link";
 import {
+  markAsStreaming,
   mockDefer20220824,
   ObservableStream,
 } from "@apollo/client/testing/internal";
@@ -163,3 +164,186 @@ test("deduplicates queries as long as a query still has deferred chunks", async
   // expect(query5).not.toEmitAnything();
   expect(outgoingRequestSpy).toHaveBeenCalledTimes(2);
 });
+
+it.each([["cache-first"], ["no-cache"]] as const)(
+  "correctly merges deleted rows when receiving a deferred payload",
+  async (fetchPolicy) => {
+    const query = gql`
+      query Characters {
+        characters {
+          id
+          uppercase
+          ... @defer {
+            lowercase
+          }
+        }
+      }
+    `;
+
+    const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } =
+      mockDefer20220824();
+    const client = new ApolloClient({
+      cache: new InMemoryCache(),
+      link: httpLink,
+      incrementalHandler: new Defer20220824Handler(),
+    });
+
+    const observable = client.watchQuery({ query, fetchPolicy });
+    const stream = new ObservableStream(observable);
+
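+    // before any chunks arrive, watchQuery emits an empty loading result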
+    await expect(stream).toEmitTypedValue({
+      data: undefined,
+      dataState: "empty",
+      loading: true,
+      networkStatus: NetworkStatus.loading,
+      partial: true,
+    });
+
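+    // the initial chunk delivers only the non-deferred fields;
+    // hasNext: true signals that more chunks follow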
+    enqueueInitialChunk({
+      data: {
+        characters: [
+          { __typename: "Character", id: 1, uppercase: "A" },
+          { __typename: "Character", id: 2, uppercase: "B" },
+          { __typename: "Character", id: 3, uppercase: "C" },
+        ],
+      },
+      hasNext: true,
+    });
+
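+    // while the deferred fields are still pending, results are emitted as
+    // partial, streaming data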
+    await expect(stream).toEmitTypedValue({
+      data: markAsStreaming({
+        characters: [
+          { __typename: "Character", id: 1, uppercase: "A" },
+          { __typename: "Character", id: 2, uppercase: "B" },
+          { __typename: "Character", id: 3, uppercase: "C" },
+        ],
+      }),
+      dataState: "streaming",
+      loading: true,
+      networkStatus: NetworkStatus.streaming,
+      partial: true,
+    });
+
+    enqueueSubsequentChunk({
+      incremental: [{ data: { lowercase: "a" }, path: ["characters", 0] }],
+      hasNext: true,
+    });
+
+    await expect(stream).toEmitTypedValue({
+      data: markAsStreaming({
+        characters: [
+          { __typename: "Character", id: 1, uppercase: "A", lowercase: "a" },
+          { __typename: "Character", id: 2, uppercase: "B" },
+          { __typename: "Character", id: 3, uppercase: "C" },
+        ],
+      }),
+      dataState: "streaming",
+      loading: true,
+      networkStatus: NetworkStatus.streaming,
+      partial: true,
+    });
+
+    enqueueSubsequentChunk({
+      incremental: [
+        { data: { lowercase: "b" }, path: ["characters", 1] },
+        { data: { lowercase: "c" }, path: ["characters", 2] },
+      ],
+      hasNext: false,
+    });
+
+    await expect(stream).toEmitTypedValue({
+      data: {
+        characters: [
+          { __typename: "Character", id: 1, uppercase: "A", lowercase: "a" },
+          { __typename: "Character", id: 2, uppercase: "B", lowercase: "b" },
+          { __typename: "Character", id: 3, uppercase: "C", lowercase: "c" },
+        ],
+      },
+      dataState: "complete",
+      loading: false,
+      networkStatus: NetworkStatus.ready,
+      partial: false,
+    });
+
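+    // refetch the query; the previous complete result is re-emitted while
+    // the refetch is in flight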
+    void observable.refetch();
+
+    await expect(stream).toEmitTypedValue({
+      data: {
+        characters: [
+          { __typename: "Character", id: 1, uppercase: "A", lowercase: "a" },
+          { __typename: "Character", id: 2, uppercase: "B", lowercase: "b" },
+          { __typename: "Character", id: 3, uppercase: "C", lowercase: "c" },
+        ],
+      },
+      dataState: "complete",
+      loading: true,
+      networkStatus: NetworkStatus.refetch,
+      partial: false,
+    });
+
+    // on refetch, the list is shorter
+    enqueueInitialChunk({
+      data: {
+        characters: [
+          { __typename: "Character", id: 1, uppercase: "A" },
+          { __typename: "Character", id: 2, uppercase: "B" },
+        ],
+      },
+      hasNext: true,
+    });
+
+    await expect(stream).toEmitTypedValue({
+      data: markAsStreaming({
+        characters:
+          // no-cache fetch policy doesn't merge with existing cache data, so
+          // the lowercase field is not added to each item
+          fetchPolicy === "no-cache" ?
+            [
+              { __typename: "Character", id: 1, uppercase: "A" },
+              { __typename: "Character", id: 2, uppercase: "B" },
+            ]
+          : [
+              {
+                __typename: "Character",
+                id: 1,
+                uppercase: "A",
+                lowercase: "a",
+              },
+              {
+                __typename: "Character",
+                id: 2,
+                uppercase: "B",
+                lowercase: "b",
+              },
+            ],
+      }),
+      dataState: "streaming",
+      loading: true,
+      networkStatus: NetworkStatus.streaming,
+      partial: true,
+    });
+
+    enqueueSubsequentChunk({
+      incremental: [
+        { data: { lowercase: "a" }, path: ["characters", 0] },
+        { data: { lowercase: "b" }, path: ["characters", 1] },
+      ],
+      hasNext: false,
+    });
+
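+    // the character removed on refetch (id: 3) no longer appears in the
+    // final result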
+    await expect(stream).toEmitTypedValue({
+      data: {
+        characters: [
+          { __typename: "Character", id: 1, uppercase: "A", lowercase: "a" },
+          { __typename: "Character", id: 2, uppercase: "B", lowercase: "b" },
+        ],
+      },
+      dataState: "complete",
+      loading: false,
+      networkStatus: NetworkStatus.ready,
+      partial: false,
+    });
+
+    await expect(stream).not.toEmitAnything();
+  }
+);