 import java.util.Optional;
 import java.util.function.Function;
 import java.util.function.UnaryOperator;
-import java.util.stream.Collectors;
 
 import org.reactivestreams.Publisher;
+
 import org.springframework.dao.IncorrectResultSizeDataAccessException;
 import org.springframework.dao.OptimisticLockingFailureException;
 import org.springframework.data.domain.Example;
 import org.springframework.data.mongodb.repository.ReactiveMongoRepository;
 import org.springframework.data.mongodb.repository.query.MongoEntityInformation;
 import org.springframework.data.repository.query.FluentQuery;
-import org.springframework.data.util.StreamUtils;
 import org.springframework.lang.Nullable;
 import org.springframework.util.Assert;
 
@@ -264,8 +263,15 @@ public Mono<Void> deleteAllById(Iterable<? extends ID> ids) {
 
 		Assert.notNull(ids, "The given Iterable of Id's must not be null");
 
+		return deleteAllById(ids, getReadPreference());
+	}
+
+	@SuppressWarnings("OptionalUsedAsFieldOrParameterType")
+	private Mono<Void> deleteAllById(Iterable<? extends ID> ids, Optional<ReadPreference> readPreference) {
+
 		Query query = getIdQuery(ids);
-		getReadPreference().ifPresent(query::withReadPreference);
+		readPreference.ifPresent(query::withReadPreference);
+
 		return mongoOperations.remove(query, entityInformation.getJavaType(), entityInformation.getCollectionName()).then();
 	}
 
@@ -274,10 +280,9 @@ public Mono<Void> deleteAll(Iterable<? extends T> entities) {
 
 		Assert.notNull(entities, "The given Iterable of entities must not be null");
 
-		Collection<? extends ID> ids = StreamUtils.createStreamFromIterator(entities.iterator())
-				.map(entityInformation::getId).collect(Collectors.toList());
-
-		return deleteAllById(ids);
+		Optional<ReadPreference> readPreference = getReadPreference();
+		return Flux.fromIterable(entities).map(entityInformation::getRequiredId).collectList()
+				.flatMap(ids -> deleteAllById(ids, readPreference));
 	}
 
 	@Override
@@ -464,10 +469,10 @@ private Query getIdQuery(Iterable<? extends ID> ids) {
 
 	/**
 	 * Transform the elements emitted by this Flux into Publishers, then flatten these inner publishers into a single
-	 * Flux. The operation does not allow interleave between performing the map operation for the first and second source
-	 * element guaranteeing the mapping operation completed before subscribing to its following inners, that will then be
-	 * subscribed to eagerly emitting elements in order of their source.
-	 *
+	 * Flux. The operation does not allow interleaving between performing the map operation for the first and second
+	 * source element guaranteeing the mapping operation completed before subscribing to its following inners, that will
+	 * then be subscribed to eagerly emitting elements in order of their source.
+	 *
 	 * <pre class="code">
 	 * Flux.just(first-element).flatMap(...)
 	 * .concatWith(Flux.fromIterable(remaining-elements).flatMapSequential(...))
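A minimal sketch (not part of this commit) of the ordering guarantee the Javadoc above describes, assuming Project Reactor is on the classpath; the mapper "slowLookup", its delays, and the class name are hypothetical and only illustrate that results arrive in source order even when later inner publishers finish first:

// Sketch only: demonstrates Flux.just(first).flatMap(...).concatWith(rest.flatMapSequential(...))
import java.time.Duration;
import java.util.List;
import java.util.function.Function;

import reactor.core.publisher.Flux;

class ConcatMapSequentiallySketch {

	public static void main(String[] args) {

		List<Integer> source = List.of(1, 2, 3, 4);

		// hypothetical mapper: the first element's inner publisher deliberately finishes last
		Function<Integer, Flux<Integer>> slowLookup = i -> Flux.just(i * 10)
				.delayElements(Duration.ofMillis(i == 1 ? 200 : 20));

		// concatWith subscribes to the remainder only after the first mapping completes;
		// flatMapSequential subscribes to the remaining inners eagerly but emits them in source order
		Flux.just(source.get(0)).flatMap(slowLookup)
				.concatWith(Flux.fromIterable(source.subList(1, source.size())).flatMapSequential(slowLookup))
				.doOnNext(System.out::println) // prints 10, 20, 30, 40
				.blockLast();
	}
}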
@@ -481,42 +486,54 @@ private Query getIdQuery(Iterable<? extends ID> ids) {
 	static <T> Flux<T> concatMapSequentially(List<T> source,
 			Function<? super T, ? extends Publisher<? extends T>> mapper) {
 
-		if (source.isEmpty()) {
-			return Flux.empty();
-		}
-		if (source.size() == 1) {
-			return Flux.just(source.iterator().next()).flatMap(mapper);
-		}
-		if (source.size() == 2) {
-			return Flux.fromIterable(source).concatMap(mapper);
-		}
+		return switch (source.size()) {
+			case 0 -> Flux.empty();
+			case 1 -> Flux.just(source.get(0)).flatMap(mapper);
+			case 2 -> Flux.fromIterable(source).concatMap(mapper);
+			default -> {
 
-		Flux<T> first = Flux.just(source.get(0)).flatMap(mapper);
-		Flux<T> theRest = Flux.fromIterable(source.subList(1, source.size())).flatMapSequential(mapper);
-		return first.concatWith(theRest);
+				Flux<T> first = Flux.just(source.get(0)).flatMap(mapper);
+				Flux<T> theRest = Flux.fromIterable(source.subList(1, source.size())).flatMapSequential(mapper);
+				yield first.concatWith(theRest);
+			}
+		};
 	}
 
 	static <T> Flux<T> concatMapSequentially(Publisher<T> publisher,
 			Function<? super T, ? extends Publisher<? extends T>> mapper) {
 
-		return Flux.from(publisher).switchOnFirst(((signal, source) -> {
+		return Flux.from(publisher).switchOnFirst((signal, source) -> {
 
 			if (!signal.hasValue()) {
 				return source.concatMap(mapper);
 			}
 
 			Mono<T> firstCall = Mono.from(mapper.apply(signal.get()));
 			return firstCall.concatWith(source.skip(1).flatMapSequential(mapper));
-		}));
+		});
 	}
 
 	private static <E> List<E> toList(Iterable<E> source) {
-		return source instanceof List<E> list ? list : new ArrayList<>(toCollection(source));
+
+		Collection<E> collection = toCollection(source);
+
+		if (collection instanceof List<E> list) {
+			return list;
+		}
+
+		return new ArrayList<>(collection);
 	}
 
 	private static <E> Collection<E> toCollection(Iterable<E> source) {
-		return source instanceof Collection<E> collection ? collection
-				: StreamUtils.createStreamFromIterator(source.iterator()).collect(Collectors.toList());
+
+		if (source instanceof Collection<E> collection) {
+			return collection;
+		}
+
+		List<E> list = new ArrayList<>();
+		source.forEach(list::add);
+
+		return list;
 	}
 
 	/**