
Commit 5b86b2f

Polishing.
Switch to Flux.fromIterable(…) from StreamUtils in deleteAll(Iterable). Use switch expressions, refine toList/toCollection arrangement. Guard tests against absent ReplicaSet. See #4838. Original pull request: #4843.
1 parent 721799d commit 5b86b2f
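
The central change the message describes is collecting the ids to delete through a Reactor pipeline instead of StreamUtils and Collectors. A minimal standalone sketch of that collection step; the Person record and id values are illustrative, not part of the commit:

import java.util.List;

import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

class CollectIdsSketch {

	record Person(String id, String name) {}

	public static void main(String[] args) {

		List<Person> entities = List.of(new Person("1", "Ada"), new Person("2", "Grace"));

		// Flux.fromIterable(...).collectList() replaces the former StreamUtils/Collectors
		// based collection; nothing runs until the resulting Mono is subscribed to.
		Mono<List<String>> ids = Flux.fromIterable(entities)
				.map(Person::id)
				.collectList();

		ids.subscribe(list -> System.out.println("ids to delete: " + list)); // ids to delete: [1, 2]
	}
}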

File tree

2 files changed, +48 −28 lines changed


spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SimpleReactiveMongoRepository.java

+45 −28
@@ -28,9 +28,9 @@
 import java.util.Optional;
 import java.util.function.Function;
 import java.util.function.UnaryOperator;
-import java.util.stream.Collectors;
 
 import org.reactivestreams.Publisher;
+
 import org.springframework.dao.IncorrectResultSizeDataAccessException;
 import org.springframework.dao.OptimisticLockingFailureException;
 import org.springframework.data.domain.Example;
@@ -47,7 +47,6 @@
 import org.springframework.data.mongodb.repository.ReactiveMongoRepository;
 import org.springframework.data.mongodb.repository.query.MongoEntityInformation;
 import org.springframework.data.repository.query.FluentQuery;
-import org.springframework.data.util.StreamUtils;
 import org.springframework.lang.Nullable;
 import org.springframework.util.Assert;
 
@@ -264,8 +263,15 @@ public Mono<Void> deleteAllById(Iterable<? extends ID> ids) {
 
 		Assert.notNull(ids, "The given Iterable of Id's must not be null");
 
+		return deleteAllById(ids, getReadPreference());
+	}
+
+	@SuppressWarnings("OptionalUsedAsFieldOrParameterType")
+	private Mono<Void> deleteAllById(Iterable<? extends ID> ids, Optional<ReadPreference> readPreference) {
+
 		Query query = getIdQuery(ids);
-		getReadPreference().ifPresent(query::withReadPreference);
+		readPreference.ifPresent(query::withReadPreference);
+
 		return mongoOperations.remove(query, entityInformation.getJavaType(), entityInformation.getCollectionName()).then();
 	}
 
@@ -274,10 +280,9 @@ public Mono<Void> deleteAll(Iterable<? extends T> entities) {
 
 		Assert.notNull(entities, "The given Iterable of entities must not be null");
 
-		Collection<? extends ID> ids = StreamUtils.createStreamFromIterator(entities.iterator())
-				.map(entityInformation::getId).collect(Collectors.toList());
-
-		return deleteAllById(ids);
+		Optional<ReadPreference> readPreference = getReadPreference();
+		return Flux.fromIterable(entities).map(entityInformation::getRequiredId).collectList()
+				.flatMap(ids -> deleteAllById(ids, readPreference));
 	}
 
 	@Override
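
The rewritten deleteAll(Iterable) above resolves the read preference once, switches from getId to getRequiredId (so an entity without an identifier fails instead of contributing a null id), and defers the actual remove until the ids have been collected. A small sketch of that capture-then-defer shape, with illustrative names rather than commit code:

import java.util.List;
import java.util.Optional;

import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

class CaptureThenDeferSketch {

	public static void main(String[] args) {

		// Resolved eagerly, exactly once, before the reactive pipeline is assembled.
		Optional<String> readPreference = Optional.of("secondaryPreferred");

		Mono<Void> deleteAll = Flux.fromIterable(List.of("1", "2", "3"))
				.collectList()
				// Stands in for the actual remove; runs only on subscription but
				// still sees the read preference captured above.
				.doOnNext(ids -> System.out.println("remove " + ids + " using " + readPreference.orElse("primary")))
				.then();

		System.out.println("pipeline assembled, nothing executed yet");
		deleteAll.subscribe();
	}
}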
@@ -464,10 +469,10 @@ private Query getIdQuery(Iterable<? extends ID> ids) {
 
 	/**
 	 * Transform the elements emitted by this Flux into Publishers, then flatten these inner publishers into a single
-	 * Flux. The operation does not allow interleave between performing the map operation for the first and second source
-	 * element guaranteeing the mapping operation completed before subscribing to its following inners, that will then be
-	 * subscribed to eagerly emitting elements in order of their source.
-	 *
+	 * Flux. The operation does not allow interleaving between performing the map operation for the first and second
+	 * source element guaranteeing the mapping operation completed before subscribing to its following inners, that will
+	 * then be subscribed to eagerly emitting elements in order of their source.
+	 *
 	 * <pre class="code">
 	 * Flux.just(first-element).flatMap(...)
 	 * .concatWith(Flux.fromIterable(remaining-elements).flatMapSequential(...))
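
The reworded Javadoc describes the ordering contract: the first element's mapping completes before the remaining inner publishers are subscribed eagerly, and results come out in source order. A small Reactor sketch, separate from the commit, showing the flatMapSequential part of that contract; the inner publishers finish out of order, yet the output stays in source order:

import java.time.Duration;
import java.util.List;

import reactor.core.publisher.Flux;

class SequentialOrderingSketch {

	public static void main(String[] args) {

		// "c" completes fastest and "a" slowest, but flatMapSequential subscribes
		// eagerly and re-emits the results in the order of the source elements.
		List<String> ordered = Flux.just("a", "b", "c")
				.flatMapSequential(value -> Flux.just(value)
						.delayElements(Duration.ofMillis("a".equals(value) ? 30 : "b".equals(value) ? 20 : 10)))
				.collectList()
				.block();

		System.out.println(ordered); // [a, b, c]
	}
}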
@@ -481,42 +486,54 @@ private Query getIdQuery(Iterable<? extends ID> ids) {
 	static <T> Flux<T> concatMapSequentially(List<T> source,
 			Function<? super T, ? extends Publisher<? extends T>> mapper) {
 
-		if (source.isEmpty()) {
-			return Flux.empty();
-		}
-		if (source.size() == 1) {
-			return Flux.just(source.iterator().next()).flatMap(mapper);
-		}
-		if (source.size() == 2) {
-			return Flux.fromIterable(source).concatMap(mapper);
-		}
+		return switch (source.size()) {
+			case 0 -> Flux.empty();
+			case 1 -> Flux.just(source.get(0)).flatMap(mapper);
+			case 2 -> Flux.fromIterable(source).concatMap(mapper);
+			default -> {
 
-		Flux<T> first = Flux.just(source.get(0)).flatMap(mapper);
-		Flux<T> theRest = Flux.fromIterable(source.subList(1, source.size())).flatMapSequential(mapper);
-		return first.concatWith(theRest);
+				Flux<T> first = Flux.just(source.get(0)).flatMap(mapper);
+				Flux<T> theRest = Flux.fromIterable(source.subList(1, source.size())).flatMapSequential(mapper);
+				yield first.concatWith(theRest);
+			}
+		};
 	}
 
 	static <T> Flux<T> concatMapSequentially(Publisher<T> publisher,
 			Function<? super T, ? extends Publisher<? extends T>> mapper) {
 
-		return Flux.from(publisher).switchOnFirst(((signal, source) -> {
+		return Flux.from(publisher).switchOnFirst((signal, source) -> {
 
 			if (!signal.hasValue()) {
 				return source.concatMap(mapper);
 			}
 
 			Mono<T> firstCall = Mono.from(mapper.apply(signal.get()));
 			return firstCall.concatWith(source.skip(1).flatMapSequential(mapper));
-		}));
+		});
 	}
 
 	private static <E> List<E> toList(Iterable<E> source) {
-		return source instanceof List<E> list ? list : new ArrayList<>(toCollection(source));
+
+		Collection<E> collection = toCollection(source);
+
+		if (collection instanceof List<E> list) {
+			return list;
+		}
+
+		return new ArrayList<>(collection);
 	}
 
 	private static <E> Collection<E> toCollection(Iterable<E> source) {
-		return source instanceof Collection<E> collection ? collection
-				: StreamUtils.createStreamFromIterator(source.iterator()).collect(Collectors.toList());
+
+		if (source instanceof Collection<E> collection) {
+			return collection;
+		}
+
+		List<E> list = new ArrayList<>();
+		source.forEach(list::add);
+
+		return list;
 	}
 
 	/**
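
The restructured toCollection(Iterable) above keeps the short-circuit for sources that already are a Collection and otherwise copies the elements without StreamUtils. A standalone sketch of the same idea, with illustrative class and value names:

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

class IterableToCollectionSketch {

	// Same shape as the refactored helper: reuse the source when it already is a
	// Collection, otherwise copy its elements into a fresh list.
	static <E> Collection<E> toCollection(Iterable<E> source) {

		if (source instanceof Collection<E> collection) {
			return collection;
		}

		List<E> list = new ArrayList<>();
		source.forEach(list::add);

		return list;
	}

	public static void main(String[] args) {

		List<String> alreadyACollection = List.of("a", "b");
		Iterable<String> plainIterable = () -> List.of("c", "d").iterator();

		System.out.println(toCollection(alreadyACollection) == alreadyACollection); // true, no copy made
		System.out.println(toCollection(plainIterable)); // [c, d]
	}
}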

spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/SimpleReactiveMongoRepositoryTests.java

+3
@@ -46,6 +46,7 @@
 import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
 import org.springframework.data.mongodb.repository.support.ReactiveMongoRepositoryFactory;
 import org.springframework.data.mongodb.repository.support.SimpleReactiveMongoRepository;
+import org.springframework.data.mongodb.test.util.EnableIfReplicaSetAvailable;
 import org.springframework.data.repository.query.FluentQuery;
 import org.springframework.data.repository.query.ReactiveQueryMethodEvaluationContextProvider;
 import org.springframework.lang.Nullable;
@@ -339,6 +340,7 @@ void savePublisherOfEntitiesShouldInsertEntity() {
 	}
 
 	@RepeatedTest(10) // GH-4838
+	@EnableIfReplicaSetAvailable
 	void transactionalSaveAllForStuffThatIsConsideredAnUpdateOfExistingData() {
 
 		ReactiveMongoTransactionManager txmgr = new ReactiveMongoTransactionManager(template.getMongoDatabaseFactory());
@@ -349,6 +351,7 @@ void transactionalSaveAllForStuffThatIsConsideredAnUpdateOfExistingData() {
 	}
 
 	@RepeatedTest(10) // GH-4838
+	@EnableIfReplicaSetAvailable
 	void transactionalSaveAllWithPublisherForStuffThatIsConsideredAnUpdateOfExistingData() {
 
 		ReactiveMongoTransactionManager txmgr = new ReactiveMongoTransactionManager(template.getMongoDatabaseFactory());
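
The two transactional @RepeatedTest methods are now guarded so they only run when a MongoDB replica set is available, which transactions require. As a rough illustration of how such a guard can be composed from JUnit 5 conditions, here is a hypothetical, simplified annotation; the actual EnableIfReplicaSetAvailable in spring-data-mongodb's test utilities may be implemented quite differently:

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

import org.junit.jupiter.api.extension.ConditionEvaluationResult;
import org.junit.jupiter.api.extension.ExecutionCondition;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.api.extension.ExtensionContext;

// Hypothetical, simplified guard; not the actual spring-data-mongodb implementation.
@Target({ ElementType.TYPE, ElementType.METHOD })
@Retention(RetentionPolicy.RUNTIME)
@ExtendWith(ReplicaSetCondition.class)
@interface RequiresReplicaSetSketch {
}

class ReplicaSetCondition implements ExecutionCondition {

	@Override
	public ConditionEvaluationResult evaluateExecutionCondition(ExtensionContext context) {

		// Placeholder check: a real condition would probe the running MongoDB
		// deployment (for example via the "hello" command) for replica set membership.
		boolean replicaSetAvailable = Boolean.getBoolean("mongodb.replicaSet.available");

		return replicaSetAvailable
				? ConditionEvaluationResult.enabled("replica set available")
				: ConditionEvaluationResult.disabled("no replica set; skipping transactional test");
	}
}

A test method would then carry the annotation alongside @RepeatedTest, in the same way the diff adds @EnableIfReplicaSetAvailable.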
