- *
+ *
*
*
* @param other the [[Publisher]] to wait for
@@ -110,7 +112,7 @@ class Mono[T] private(private val jMono: JMono[T])
*
* @return T the result
*/
- final def block(): T = jMono.block()
+ final def block(): T = new ReactiveSMono[T](jMono).block()
/**
* Block until a next signal is received, will return null if onComplete, T if onNext, throw a
@@ -127,7 +129,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @param timeout maximum time period to wait for before raising a [[RuntimeException]]
* @return T the result
*/
- final def block(timeout: Duration): T = jMono.block(timeout)
+ final def block(timeout: Duration): T = new ReactiveSMono[T](jMono).block(timeout)
/**
* Subscribe to this {[[Mono]] Mono} and block indefinitely until a next signal is
@@ -144,7 +146,7 @@ class Mono[T] private(private val jMono: JMono[T])
*
* @return T the result
*/
- final def blockOption(): Option[T] = jMono.blockOptional()
+ final def blockOption(): Option[T] = new ReactiveSMono[T](jMono).blockOption()
/**
* Subscribe to this [[Mono]] and block until a next signal is
@@ -164,7 +166,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @param timeout maximum time period to wait for before raising a [[RuntimeException]]
* @return T the result
*/
- final def blockOption(timeout: Duration): Option[T] = jMono.blockOptional(timeout)
+ final def blockOption(timeout: Duration): Option[T] = new ReactiveSMono(jMono).blockOption(timeout)
/**
* Cast the current [[Mono]] produced type into a target produced type.
@@ -176,9 +178,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @param clazz the target type to cast to
* @return a casted [[Mono]]
*/
- final def cast[E](clazz: Class[E]) = new Mono[E](
- jMono.cast(clazz)
- )
+ final def cast[E](clazz: Class[E]): Mono[E] = Mono.from(new ReactiveSMono[T](jMono).cast[E](clazz))
/**
* Turn this [[Mono]] into a hot source and cache last emitted signals for further [[Subscriber]].
@@ -189,9 +189,9 @@ class Mono[T] private(private val jMono: JMono[T])
*
* @return a replaying [[Mono]]
*/
- final def cache(): Mono[T] = Mono[T](jMono.cache())
+ final def cache(): Mono[T] = Mono.from(new ReactiveSMono[T](jMono).cache())
- final def cache(ttl: Duration) = Mono(jMono.cache(ttl))
+ final def cache(ttl: Duration): Mono[T] = Mono.from(new ReactiveSMono[T](jMono).cache(ttl))
/**
* Prepare this [[Mono]] so that subscribers will cancel from it on a
@@ -201,9 +201,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @param scheduler the [[reactor.core.scheduler.Scheduler]] to signal cancel on
* @return a scheduled cancel [[Mono]]
*/
- final def cancelOn(scheduler: Scheduler): Mono[T] = Mono[T](
- jMono.cancelOn(scheduler)
- )
+ final def cancelOn(scheduler: Scheduler): Mono[T] = Mono.from(new ReactiveSMono[T](jMono).cancelOn(scheduler))
/**
* Defer the given transformation to this [[Mono]] in order to generate a
@@ -218,13 +216,11 @@ class Mono[T] private(private val jMono: JMono[T])
* @return a new [[Mono]]
* @see [[Mono.as]] for a loose conversion to an arbitrary type
*/
- final def compose[V](transformer: (Mono[T] => Publisher[V])): Mono[V] = {
+ final def compose[V](transformer: Mono[T] => Publisher[V]): Mono[V] = {
val transformerFunction = new Function[JMono[T], Publisher[V]] {
override def apply(t: JMono[T]): Publisher[V] = transformer(Mono.this)
}
- Mono[V](
- jMono.compose(transformerFunction)
- )
+ Mono.from(new ReactiveSMono[V](jMono.compose(transformerFunction)))
}
/**
@@ -236,7 +232,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @param other the [[Publisher]] sequence to concat after this [[Flux]]
* @return a concatenated [[Flux]]
*/
- final def concatWith(other: Publisher[T]): Flux[T] = Flux(jMono.concatWith(other))
+ final def concatWith(other: Publisher[T]): Flux[T] = Flux.from(new ReactiveSFlux[T](jMono.concatWith(other)))
/**
* Provide a default unique value if this mono is completed without any data
@@ -249,7 +245,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @return a new [[Mono]]
* @see [[Flux.defaultIfEmpty]]
*/
- final def defaultIfEmpty(defaultV: T): Mono[T] = Mono[T](jMono.defaultIfEmpty(defaultV))
+ final def defaultIfEmpty(defaultV: T): Mono[T] = Mono.from[T](new ReactiveSMono[T](jMono.defaultIfEmpty(defaultV)))
/**
* Delay this [[Mono]] element ([[Subscriber.onNext]] signal) by a given
@@ -266,7 +262,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @param delay duration by which to delay the [[Subscriber.onNext]] signal
* @return a delayed [[Mono]]
*/
- final def delayElement(delay: Duration) = Mono(jMono.delayElement(delay))
+ final def delayElement(delay: Duration): Mono[T] = Mono.from(new ReactiveSMono(jMono).delayElement(delay))
/**
* Delay this [[Mono]] element ([[Subscriber.onNext]] signal) by a given
@@ -284,7 +280,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @param timer a time-capable [[Scheduler]] instance to delay the value signal on
* @return a delayed [[Mono]]
*/
- final def delayElement(delay: Duration, timer: Scheduler) = Mono(jMono.delayElement(delay, timer))
+ final def delayElement(delay: Duration, timer: Scheduler): Mono[T] = Mono.from(new ReactiveSMono(jMono).delayElement(delay, timer))
/**
* Subscribe to this [[Mono Mono]] and another [[Publisher]] that is generated from
@@ -305,7 +301,7 @@ class Mono[T] private(private val jMono: JMono[T])
* [[Publisher]] whose termination will trigger relaying the value.
* @return this Mono, but delayed until the derived publisher terminates.
*/
- final def delayUntil(triggerProvider: T => Publisher[_]) = Mono(jMono.delayUntil(triggerProvider))
+ final def delayUntil(triggerProvider: T => Publisher[_]): Mono[T] = Mono.from(new ReactiveSMono(jMono).delayUntil(triggerProvider))
/**
* Delay the [[Mono.subscribe subscription]] to this [[Mono]] source until the given
@@ -318,7 +314,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @return a delayed [[Mono]]
*
*/
- final def delaySubscription(delay: Duration): Mono[T] = Mono(jMono.delaySubscription(delay))
+ final def delaySubscription(delay: Duration): Mono[T] = Mono.from(new ReactiveSMono(jMono).delaySubscription(delay))
/**
* Delay the [[Mono.subscribe subscription]] to this [[Mono]] source until the given
@@ -332,7 +328,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @return a delayed [[Mono]]
*
*/
- final def delaySubscription(delay: Duration, timer: Scheduler) = Mono(jMono.delaySubscription(delay, timer))
+ final def delaySubscription(delay: Duration, timer: Scheduler) = Mono.from(new ReactiveSMono(jMono).delaySubscription(delay, timer))
/**
* Delay the subscription to this [[Mono]] until another [[Publisher]]
@@ -347,9 +343,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @return a delayed [[Mono]]
*
*/
- final def delaySubscription[U](subscriptionDelay: Publisher[U]): Mono[T] = new Mono[T](
- jMono.delaySubscription(subscriptionDelay)
- )
+ final def delaySubscription[U](subscriptionDelay: Publisher[U]): Mono[T] = Mono.from(new ReactiveSMono(jMono).delaySubscription(subscriptionDelay))
/**
* A "phantom-operator" working only if this
@@ -363,9 +357,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @tparam X the dematerialized type
* @return a dematerialized [[Mono]]
*/
- final def dematerialize[X](): Mono[X] = new Mono[X](
- jMono.dematerialize[X]()
- )
+ final def dematerialize[X](): Mono[X] = Mono.from(new ReactiveSMono(jMono).dematerialize[X]())
/**
* Triggered after the [[Mono]] terminates, either by completing downstream successfully or with an error.
@@ -383,9 +375,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @param afterTerminate the callback to call after [[org.reactivestreams.Subscriber.onNext]], [[org.reactivestreams.Subscriber.onComplete]] without preceding [[org.reactivestreams.Subscriber.onNext]] or [[org.reactivestreams.Subscriber.onError]]
* @return a new [[Mono]]
*/
- final def doAfterSuccessOrError(afterTerminate: (_ >: T, Throwable) => Unit): Mono[T] = Mono[T](
- jMono.doAfterSuccessOrError(afterTerminate)
- )
+ final def doAfterSuccessOrError(afterTerminate: (_ >: T, Throwable) => Unit): Mono[T] = Mono.from(new ReactiveSMono[T](jMono.doAfterSuccessOrError(afterTerminate)))
/**
* Add behavior (side-effect) triggered after the [[Mono]] terminates, either by
@@ -397,16 +387,9 @@ class Mono[T] private(private val jMono: JMono[T])
* @param afterTerminate the callback to call after [[Subscriber.onComplete]] or [[Subscriber.onError]]
* @return an observed [[Flux]]
*/
- final def doAfterTerminate(afterTerminate: () => Unit): Mono[T] = Mono(jMono.doAfterTerminate(afterTerminate))
+ final def doAfterTerminate(afterTerminate: () => Unit): Mono[T] = Mono.from(new ReactiveSMono[T](jMono).doAfterTerminate(afterTerminate))
- final def doFinally(onFinally: (SignalType => Unit)): Mono[T] = {
- val onFinallyFunction = new Consumer[SignalType] {
- override def accept(t: SignalType): Unit = onFinally(t)
- }
- new Mono[T](
- jMono.doFinally(onFinallyFunction)
- )
- }
+ final def doFinally(onFinally: SignalType => Unit): Mono[T] = Mono.from(new ReactiveSMono[T](jMono).doFinally(onFinally))
/**
* Triggered when the [[Mono]] is cancelled.
@@ -419,14 +402,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @param onCancel the callback to call on [[org.reactivestreams.Subscriber.cancel]]
* @return a new [[Mono]]
*/
- final def doOnCancel(onCancel: () => Unit): Mono[T] = {
- val onCancelFunction = new Runnable {
- override def run(): Unit = onCancel()
- }
- new Mono[T](
- jMono.doOnCancel(onCancelFunction)
- )
- }
+ final def doOnCancel(onCancel: () => Unit): Mono[T] = Mono.from(new ReactiveSMono[T](jMono).doOnCancel(onCancel))
/**
* Add behavior triggered when the [[Mono]] emits a data successfully.
@@ -438,14 +414,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @param onNext the callback to call on [[Subscriber.onNext]]
* @return a new [[Mono]]
*/
- final def doOnNext(onNext: (T => Unit)): Mono[T] = {
- val onNextFunction = new Consumer[T] {
- override def accept(t: T): Unit = onNext(t)
- }
- new Mono[T](
- jMono.doOnNext(onNextFunction)
- )
- }
+ final def doOnNext(onNext: T => Unit): Mono[T] = Mono.from(new ReactiveSMono[T](jMono).doOnNext(onNext))
/**
* Triggered when the [[Mono]] completes successfully.
@@ -464,14 +433,7 @@ class Mono[T] private(private val jMono: JMono[T])
* [[org.reactivestreams.Subscriber.onNext]] or [[org.reactivestreams.Subscriber.onComplete]] without preceding [[org.reactivestreams.Subscriber.onNext]]
* @return a new [[Mono]]
*/
- final def doOnSuccess(onSuccess: (T => Unit)): Mono[T] = {
- val onSuccessFunction = new Consumer[T] {
- override def accept(t: T): Unit = onSuccess(t)
- }
- new Mono[T](
- jMono.doOnSuccess(onSuccessFunction)
- )
- }
+ final def doOnSuccess(onSuccess: T => Unit): Mono[T] = Mono.from(new ReactiveSMono[T](jMono).doOnSuccess(onSuccess))
/**
* Triggered when the [[Mono]] completes with an error.
@@ -483,9 +445,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @param onError the error callback to call on [[org.reactivestreams.Subscriber.onError]]
* @return a new [[Mono]]
*/
- final def doOnError(onError: (Throwable => Unit)): Mono[T] = new Mono[T](
- jMono.doOnError(onError)
- )
+ final def doOnError(onError: Throwable => Unit): Mono[T] = Mono.from(new ReactiveSMono[T](jMono).doOnError(onError))
/**
* Triggered when the [[Mono]] completes with an error matching the given exception type.
@@ -498,10 +458,11 @@ class Mono[T] private(private val jMono: JMono[T])
* @return an observed [[Mono]]
*
*/
- final def doOnError[E <: Throwable](exceptionType: Class[E], onError: (E => Unit)): Mono[T] = new Mono[T](
- jMono.doOnError(exceptionType, onError: Consumer[E])
- )
-
+ final def doOnError[E <: Throwable](exceptionType: Class[E], onError: E => Unit): Mono[T] =
+ doOnError {
+ case e: E => onError(e)
+ case _: Throwable => ()
+ }
/**
* Triggered when the [[Mono]] completes with an error matching the given exception.
*
@@ -512,9 +473,10 @@ class Mono[T] private(private val jMono: JMono[T])
* @return an observed [[Mono]]
*
*/
- final def doOnError(predicate: (Throwable => Boolean), onError: (Throwable => Unit)): Mono[T] = new Mono[T](
- jMono.doOnError(predicate: Predicate[Throwable], onError: Consumer[Throwable])
- )
+ final def doOnError(predicate: Throwable => Boolean, onError: Throwable => Unit): Mono[T] = doOnError {
+ case e: Throwable if predicate(e) => onError(e)
+ case _: Throwable => ()
+ }
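For illustration, here is a minimal usage sketch of the predicate-guarded `doOnError` above. It assumes the wrapper exposes a `Mono.just` factory like Reactor core's; the failing `map` step and the timeout check are invented for the example:

```scala
import java.util.concurrent.TimeoutException

import reactor.core.scala.publisher.Mono

// Only react to timeouts; any other error is left to propagate untouched.
val observed: Mono[String] = Mono.just("request")
  .map[String](_ => throw new TimeoutException("backend too slow"))
  .doOnError(
    (t: Throwable) => t.isInstanceOf[TimeoutException],
    (t: Throwable) => println(s"timed out: ${t.getMessage}")
  )
```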
/**
* Attach a `Long consumer` to this [[Mono]] that will observe any request to this [[Mono]].
@@ -525,7 +487,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @param consumer the consumer to invoke on each request
* @return an observed [[Mono]]
*/
- final def doOnRequest(consumer: Long => Unit) = new Mono[T](
+ final def doOnRequest(consumer: Long => Unit): Mono[T] = new Mono[T](
jMono.doOnRequest(consumer)
)
@@ -539,9 +501,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @param onSubscribe the callback to call on [[Subscriber#onSubscribe]]
* @return a new [[Mono]]
*/
- final def doOnSubscribe(onSubscribe: Subscription => Unit) = new Mono[T](
- jMono.doOnSubscribe(onSubscribe)
- )
+ final def doOnSubscribe(onSubscribe: Subscription => Unit): Mono[T] = Mono.from(new ReactiveSMono[T](jMono).doOnSubscribe(onSubscribe))
/**
* Add behavior triggered when the [[Mono]] terminates, either by completing successfully or with an error.
@@ -553,7 +513,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @param onTerminate the callback to call [[Subscriber.onNext]], [[Subscriber.onComplete]] without preceding [[Subscriber.onNext]] or [[Subscriber.onError]]
* @return a new [[Mono]]
*/
- final def doOnTerminate(onTerminate:() => Unit) = Mono(jMono.doOnTerminate(onTerminate))
+ final def doOnTerminate(onTerminate:() => Unit): Mono[T] = Mono.from(new ReactiveSMono(jMono).doOnTerminate(onTerminate))
private val javaTupleLongAndT2ScalaTupleLongAndT = new Function[Tuple2[JLong, T], (Long, T)] {
override def apply(t: Tuple2[JLong, T]): (Long, T) = (Long2long(t.getT1), t.getT2)
@@ -569,7 +529,7 @@ class Mono[T] private(private val jMono: JMono[T])
*
* @return a transforming [[Mono]] that emits a tuple of time elapsed in milliseconds and matching data
*/
- final def elapsed() = Mono[(Long, T)](jMono.elapsed().map(javaTupleLongAndT2ScalaTupleLongAndT))
+ final def elapsed(): Mono[(Long, T)] = Mono.from(new ReactiveSMono[T](jMono).elapsed())
/**
* Map this [[Mono]] sequence into [[scala.Tuple2]] of T1 [[Long]] timemillis and T2
@@ -618,7 +578,7 @@ class Mono[T] private(private val jMono: JMono[T])
* elements per level of recursion.
* @return this Mono expanded depth-first to a [[Flux]]
*/
- final def expandDeep(expander: T => Publisher[_ <: T], capacityHint: Int) = Flux(jMono.expandDeep(expander, capacityHint))
+ final def expandDeep(expander: T => Publisher[_ <: T], capacityHint: Int): Flux[T] = Flux.from(new ReactiveSMono[T](jMono).expandDeep(expander, capacityHint))
/**
* Recursively expand elements into a graph and emit all the resulting element,
@@ -652,7 +612,7 @@ class Mono[T] private(private val jMono: JMono[T])
* values into a [[Publisher]], producing a graph.
* @return this Mono expanded depth-first to a [[Flux]]
*/
- final def expandDeep(expander: T => Publisher[_ <: T]) = Flux(jMono.expandDeep(expander))
+ final def expandDeep(expander: T => Publisher[_ <: T]): Flux[T] = Flux.from(new ReactiveSMono[T](jMono).expandDeep(expander))
/**
* Recursively expand elements into a graph and emit all the resulting element using
@@ -688,7 +648,7 @@ class Mono[T] private(private val jMono: JMono[T])
* elements per level of recursion.
* @return this Mono expanded breadth-first to a [[Flux]]
*/
- final def expand(expander: T => Publisher[_ <: T], capacityHint: Int) = Flux(jMono.expand(expander, capacityHint))
+ final def expand(expander: T => Publisher[_ <: T], capacityHint: Int): Flux[T] = Flux.from(new ReactiveSMono[T](jMono).expand(expander, capacityHint))
/**
* Recursively expand elements into a graph and emit all the resulting element using
@@ -722,7 +682,7 @@ class Mono[T] private(private val jMono: JMono[T])
* values into a [[Publisher]], producing a graph.
* @return this Mono expanded breadth-first to a [[Flux]]
*/
- final def expand(expander: T => Publisher[_ <: T]) = Flux(jMono.expand(expander))
+ final def expand(expander: T => Publisher[_ <: T]): Flux[T] = Flux.from(new ReactiveSMono[T](jMono).expand(expander))
/**
* Test the result if any of this [[Mono]] and replay it if predicate returns true.
@@ -735,7 +695,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @param tester the predicate to evaluate
* @return a filtered [[Mono]]
*/
- final def filter(tester: T => Boolean) = Mono[T](jMono.filter(tester))
+ final def filter(tester: T => Boolean): Mono[T] = Mono.from(new ReactiveSMono[T](jMono).filter(tester))
/**
* If this [[Mono]] is valued, test the value asynchronously using a generated
@@ -751,10 +711,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @return a filtered [[Mono]]
*/
final def filterWhen(asyncPredicate: T => _ <: Publisher[Boolean] with MapablePublisher[Boolean]): Mono[T] = {
- val asyncPredicateFunction = new Function[T, Publisher[JBoolean]] {
- override def apply(t: T): Publisher[JBoolean] = asyncPredicate(t).map(Boolean2boolean(_))
- }
- Mono(jMono.filterWhen(asyncPredicateFunction))
+ Mono.from(new ReactiveSMono[T](jMono).filterWhen(asyncPredicate))
}
/**
@@ -769,9 +726,12 @@ class Mono[T] private(private val jMono: JMono[T])
* @tparam R the result type bound
* @return a new [[Mono]] with an asynchronously mapped value.
*/
- final def flatMap[R](transformer: T => Mono[R]): Mono[R] = Mono[R](jMono.flatMap(new Function[T, JMono[R]] {
- override def apply(t: T): JMono[R] = transformer(t).jMono
- }))
+ final def flatMap[R](transformer: T => Mono[R]): Mono[R] = {
+ def transformerFunction(t: T): SMono[R] = {
+ new ReactiveSMono[R](transformer(t))
+ }
+ Mono.from[R](new ReactiveSMono[T](jMono).flatMap(transformerFunction))
+ }
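A small sketch of how the delegated `flatMap` above reads from caller code; `Mono.just` and the lookup function are assumptions made for the example:

```scala
import reactor.core.scala.publisher.Mono

// Hypothetical asynchronous lookup returning another Mono.
def findUserName(id: Long): Mono[String] = Mono.just(s"user-$id")

val greeting: Mono[String] = Mono.just(42L)
  .flatMap((id: Long) => findUserName(id))
  .map((name: String) => s"Hello, $name")
```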
/**
* Transform the item emitted by this [[Mono]] into a Publisher, then forward
@@ -786,7 +746,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @tparam R the merged sequence type
* @return a new [[Flux]] as the sequence is not guaranteed to be single at most
*/
- final def flatMapMany[R](mapper: T => Publisher[R]): Flux[R] = Flux(jMono.flatMapMany(mapper))
+ final def flatMapMany[R](mapper: T => Publisher[R]): Flux[R] = Flux.from(new ReactiveSMono(jMono).flatMapMany(mapper))
/**
* Transform the signals emitted by this [[Mono]] into a Publisher, then forward
@@ -805,8 +765,8 @@ class Mono[T] private(private val jMono: JMono[T])
*/
final def flatMapMany[R](mapperOnNext: T => Publisher[R],
mapperOnError: Throwable => Publisher[R],
- mapperOnComplete: () => Publisher[R]) =
- Flux(jMono.flatMapMany(mapperOnNext, mapperOnError, mapperOnComplete))
+ mapperOnComplete: () => Publisher[R]): Flux[R] =
+ Flux.from(new ReactiveSMono[T](jMono).flatMapMany(mapperOnNext, mapperOnError, mapperOnComplete))
/**
* Transform the item emitted by this [[Mono]] into [[Iterable]], then forward
@@ -822,8 +782,8 @@ class Mono[T] private(private val jMono: JMono[T])
* @return a merged [[Flux]]
*
*/
- final def flatMapIterable[R](mapper: T => Iterable[R]): Flux[R] = Flux(
- jMono.flatMapIterable(mapper.andThen(it => it.asJava))
+ final def flatMapIterable[R](mapper: T => Iterable[R]): Flux[R] = Flux.from(new ReactiveSMono[T](
+ jMono).flatMapIterable(mapper)
)
/**
@@ -831,7 +791,7 @@ class Mono[T] private(private val jMono: JMono[T])
*
* @return a [[Flux]] variant of this [[Mono]]
*/
- final def flux(): Flux[T] = Flux(jMono.flux())
+ final def flux(): Flux[T] = Flux.from(new ReactiveSMono[T](jMono).flux())
/**
* Emit a single boolean true if this [[Mono]] has an element.
@@ -842,9 +802,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @return a new [[Mono]] with true if a value is emitted and false
* otherwise
*/
- final def hasElement = Mono[Boolean](
- jMono.hasElement.map[Boolean](scalaFunction2JavaFunction((jb: JBoolean) => boolean2Boolean(jb.booleanValue())))
- )
+ final def hasElement: Mono[Boolean] = Mono.from(new ReactiveSMono[T](jMono).hasElement)
/**
* Handle the items emitted by this [[Mono]] by calling a biconsumer with the
@@ -856,7 +814,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @tparam R the transformed type
* @return a transformed [[Mono]]
*/
- final def handle[R](handler: (T, SynchronousSink[R]) => Unit): Mono[R] = Mono[R](jMono.handle(handler))
+ final def handle[R](handler: (T, SynchronousSink[R]) => Unit): Mono[R] = Mono.from(new ReactiveSMono[T](jMono).handle(handler))
/**
* Hides the identity of this [[Mono]] instance.
@@ -867,7 +825,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @return a new [[Mono]] instance
*/
//TODO: How to test this?
- final def hide: Mono[T] = Mono[T](jMono.hide())
+ final def hide: Mono[T] = Mono.from(new ReactiveSMono[T](jMono).hide())
/**
* Ignores onNext signal (dropping it) and only reacts on termination.
@@ -878,7 +836,7 @@ class Mono[T] private(private val jMono: JMono[T])
*
* @return a new completable [[Mono]].
*/
- final def ignoreElement: Mono[T] = Mono[T](jMono.ignoreElement())
+ final def ignoreElement: Mono[T] = Mono.from(new ReactiveSMono[T](jMono).ignoreElement)
/**
* Observe all Reactive Streams signals and trace them using [[reactor.util.Logger]] support.
@@ -895,7 +853,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @see [[Flux.log()]]
*/
// TODO: How to test all these .log(...) variants?
- final def log: Mono[T] = Mono[T](jMono.log())
+ final def log: Mono[T] = Mono.from(new ReactiveSMono[T](jMono).log())
/**
* Observe all Reactive Streams signals and use [[reactor.util.Logger]] support to handle trace implementation. Default will
@@ -910,7 +868,7 @@ class Mono[T] private(private val jMono: JMono[T])
* suffix will complete, e.g. "reactor.Flux.Map".
* @return a new [[Mono]]
*/
- final def log(category: Option[String]): Mono[T] = Mono[T](jMono.log(category.orNull))
+ final def log(category: Option[String]): Mono[T] = Mono.from(new ReactiveSMono[T](jMono).log(category))
/**
* Observe Reactive Streams signals matching the passed flags `options` and use
@@ -933,7 +891,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @return a new [[Mono]]
*
*/
- final def log(category: Option[String], level: Level, options: SignalType*): Mono[T] = Mono[T](jMono.log(category.orNull, level, options: _*))
+ final def log(category: Option[String], level: Level, options: SignalType*): Mono[T] = Mono.from(new ReactiveSMono[T](jMono).log(category, level, options = options))
/**
* Observe Reactive Streams signals matching the passed filter `options` and
@@ -959,7 +917,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @param options a vararg [[SignalType]] option to filter log messages
* @return a new unaltered [[Mono]]
*/
- final def log(category: Option[String], level: Level, showOperatorLine: Boolean, options: SignalType*): Mono[T] = Mono[T](jMono.log(category.orNull, level, showOperatorLine, options: _*))
+ final def log(category: Option[String], level: Level, showOperatorLine: Boolean, options: SignalType*): Mono[T] = Mono.from(new ReactiveSMono[T](jMono).log(category, level, showOperatorLine, options))
/**
* Transform the item emitted by this [[Mono]] by applying a synchronous function to it.
@@ -972,7 +930,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @tparam R the transformed type
* @return a new [[Mono]]
*/
- final def map[R](mapper: T => R) = Mono(jMono.map(mapper))
+ final def map[R](mapper: T => R): Mono[R] = Mono.from(new ReactiveSMono[T](jMono).map(mapper))
/**
* Transform the incoming onNext, onError and onComplete signals into [[Signal]].
@@ -984,7 +942,7 @@ class Mono[T] private(private val jMono: JMono[T])
*
* @return a [[Mono]] of materialized [[Signal]]
*/
- final def materialize() = new Mono[Signal[T]](jMono.materialize())
+ final def materialize(): Mono[Signal[T]] = Mono.from(new ReactiveSMono[T](jMono).materialize())
/**
* Merge emissions of this [[Mono]] with the provided [[Publisher]].
@@ -996,7 +954,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @param other the other [[Publisher]] to merge with
* @return a new [[Flux]] as the sequence is not guaranteed to be at most 1
*/
- final def mergeWith(other: Publisher[_ <: T]) = Flux(jMono.mergeWith(other))
+ final def mergeWith(other: Publisher[_ <: T]) = Flux.from(new ReactiveSMono[T](jMono).mergeWith(other))
/**
* Give a name to this sequence, which can be retrieved using [[reactor.core.scala.Scannable.name()]]
@@ -1018,7 +976,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @return a new [[Mono]]
* @see [[Mono.first]]
*/
- final def or(other: Mono[_ <: T]) = Mono[T](jMono.or(other.jMono))
+ final def or(other: Mono[_ <: T]): Mono[T] = Mono.from(new ReactiveSMono[T](jMono).or(other.jMono))
/**
* Evaluate the accepted value against the given [[Class]] type. If the
@@ -1032,7 +990,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @param clazz the [[Class]] type to test values against
* @return a new [[Mono]] reduced to items converted to the matched type
*/
- final def ofType[U](clazz: Class[U]) = Mono[U](jMono.ofType(clazz))
+ final def ofType[U](clazz: Class[U]): Mono[U] = Mono.from(new ReactiveSMono[T](jMono).ofType(clazz))
/**
* Transform the error emitted by this [[Mono]] by applying a function.
@@ -1043,7 +1001,9 @@ class Mono[T] private(private val jMono: JMono[T])
* @param mapper the error transforming [[Function1]]
* @return a transformed [[Mono]]
*/
- final def onErrorMap(mapper: Throwable => Throwable): Mono[T] = Mono[T](jMono.onErrorMap(mapper))
+ final def onErrorMap(mapper: Throwable => Throwable): Mono[T] = Mono.from(new ReactiveSMono[T](jMono).onErrorMap {
+ case throwable: Throwable => mapper(throwable)
+ })
/**
* Transform the error emitted by this [[Mono]] by applying a function if the
@@ -1057,7 +1017,9 @@ class Mono[T] private(private val jMono: JMono[T])
* @tparam E the error type
* @return a transformed [[Mono]]
*/
- final def onErrorMap[E <: Throwable](`type`: Class[E], mapper: E => Throwable): Mono[T] = Mono[T](jMono.onErrorMap(`type`, mapper))
+ final def onErrorMap[E <: Throwable](`type`: Class[E], mapper: E => Throwable): Mono[T] = Mono.from(new ReactiveSMono[T](jMono).onErrorMap {
+ case t:E if t.getClass == `type` => mapper(t)
+ })
/**
* Transform the error emitted by this [[Mono]] by applying a function if the
@@ -1071,7 +1033,9 @@ class Mono[T] private(private val jMono: JMono[T])
* @param mapper the error transforming [[Function1]]
* @return a transformed [[Mono]]
*/
- final def onErrorMap(predicate: Throwable => Boolean, mapper: Throwable => Throwable): Mono[T] = Mono[T](jMono.onErrorMap(predicate, mapper))
+ final def onErrorMap(predicate: Throwable => Boolean, mapper: Throwable => Throwable): Mono[T] = Mono.from(new ReactiveSMono[T](jMono).onErrorMap {
+ case t: Throwable if predicate(t) => mapper(t)
+ })
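A sketch of the predicate variant of `onErrorMap` above, translating matching low-level failures into a domain-specific exception; the exception class and the failing call are invented for the example:

```scala
import java.io.IOException

import reactor.core.scala.publisher.Mono

final class StorageUnavailableException(cause: Throwable) extends RuntimeException(cause)

val translated: Mono[String] = Mono.just("row-1")
  .map[String](_ => throw new IOException("disk gone"))
  .onErrorMap(
    (t: Throwable) => t.isInstanceOf[IOException],
    (t: Throwable) => new StorageUnavailableException(t)
  )
```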
/**
* Subscribe to a returned fallback publisher when any error occurs.
@@ -1085,10 +1049,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @see [[Flux.onErrorResume]]
*/
final def onErrorResume(fallback: Throwable => Mono[_ <: T]): Mono[T] = {
- val fallbackFunction = new Function[Throwable, JMono[_ <: T]] {
- override def apply(t: Throwable): JMono[_ <: T] = fallback(t).jMono
- }
- Mono[T](jMono.onErrorResume(fallbackFunction))
+ Mono.from(new ReactiveSMono[T](jMono).onErrorResume((t: Throwable) => fallback(t).jMono))
}
/**
@@ -1106,10 +1067,9 @@ class Mono[T] private(private val jMono: JMono[T])
* @see [[Flux.onErrorResume]]
*/
final def onErrorResume[E <: Throwable](`type`: Class[E], fallback: E => Mono[_ <: T]): Mono[T] = {
- val fallbackFunction = new Function[E, JMono[_ <: T]] {
- override def apply(t: E): JMono[_ <: T] = fallback(t).jMono
- }
- Mono[T](jMono.onErrorResume(`type`, fallbackFunction))
+ Mono.from(new ReactiveSMono[T](jMono).onErrorResume((t: Throwable) => t match {
+ case e: E => fallback(e).jMono
+ }))
}
/**
@@ -1125,12 +1085,10 @@ class Mono[T] private(private val jMono: JMono[T])
* @return a new [[Mono]]
* @see Flux#onErrorResume
*/
- final def onErrorResume(predicate: Throwable => Boolean, fallback: Throwable => Mono[_ <: T]): Mono[T] = {
- val fallbackFunction = new Function[Throwable, JMono[_ <: T]] {
- override def apply(t: Throwable): JMono[_ <: T] = fallback(t).jMono
- }
- Mono[T](jMono.onErrorResume(predicate, fallbackFunction))
- }
+ final def onErrorResume(predicate: Throwable => Boolean, fallback: Throwable => Mono[_ <: T]): Mono[T] =
+ Mono.from(new ReactiveSMono[T](jMono).onErrorResume((t: Throwable) => t match {
+ case e: Throwable if predicate(e) => fallback(e).jMono
+ }))
/**
* Simply emit a captured fallback value when any error is observed on this [[Mono]].
@@ -1142,7 +1100,8 @@ class Mono[T] private(private val jMono: JMono[T])
* @param fallback the value to emit if an error occurs
* @return a new falling back [[Mono]]
*/
- final def onErrorReturn(fallback: T): Mono[T] = Mono[T](jMono.onErrorReturn(fallback))
+ final def onErrorReturn(fallback: T): Mono[T] = Mono.from(new ReactiveSMono[T](jMono)
+ .onErrorResume(_ => SMono.just(fallback)))
/**
* Simply emit a captured fallback value when an error of the specified type is
@@ -1155,7 +1114,10 @@ class Mono[T] private(private val jMono: JMono[T])
* @tparam E the error type
* @return a new falling back [[Mono]]
*/
- final def onErrorReturn[E <: Throwable](`type`: Class[E], fallbackValue: T) = Mono[T](jMono.onErrorReturn(`type`, fallbackValue))
+ final def onErrorReturn[E <: Throwable](`type`: Class[E], fallbackValue: T): Mono[T] = Mono.from(new ReactiveSMono[T](jMono)
+ .onErrorResume((t: Throwable) => t match {
+ case _: E => SMono.just(fallbackValue)
+ }))
/**
* Simply emit a captured fallback value when an error matching the given predicate is
@@ -1167,7 +1129,10 @@ class Mono[T] private(private val jMono: JMono[T])
* @param fallbackValue the value to emit if a matching error occurs
* @return a new [[Mono]]
*/
- final def onErrorReturn(predicate: Throwable => Boolean, fallbackValue: T) = Mono[T](jMono.onErrorReturn(predicate, fallbackValue))
+ final def onErrorReturn(predicate: Throwable => Boolean, fallbackValue: T): Mono[T] = Mono.from(new ReactiveSMono[T](jMono)
+ .onErrorResume((t: Throwable) => t match {
+ case e: Throwable if predicate(e) => SMono.just(fallbackValue)
+ }))
/**
* Detaches both the child [[Subscriber]] and the [[Subscription]] on
@@ -1178,22 +1143,22 @@ class Mono[T] private(private val jMono: JMono[T])
* @return a detachable [[Mono]]
*/
// TODO: How to test this?
- final def onTerminateDetach() = Mono[T](jMono.onTerminateDetach())
+ final def onTerminateDetach(): Mono[T] = Mono.from(new ReactiveSMono[T](jMono).onTerminateDetach())
/**
* Shares a [[Mono]] for the duration of a function that may transform it and
* consume it as many times as necessary without causing multiple subscriptions
* to the upstream.
*
- * @param transform the tranformation function
+ * @param transform the transformation function
* @tparam R the output value type
* @return a new [[Mono]]
*/
final def publish[R](transform: Mono[T] => Mono[R]): Mono[R] = {
- val transformFunction = new Function[JMono[T], JMono[R]] {
- override def apply(t: JMono[T]): JMono[R] = transform(Mono.this).jMono
+ def transformF(t: SMono[T]): SMono[R] = {
+ new ReactiveSMono[R](transform(Mono.from[T](t)))
}
- Mono[R](jMono.publish(transformFunction))
+ Mono.from[R](new ReactiveSMono[T](jMono).publish[R](transformF))
}
/**
@@ -1209,7 +1174,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @return an asynchronously producing [[Mono]]
*/
//TODO: How to test this?
- final def publishOn(scheduler: Scheduler) = new Mono[T](jMono.publishOn(scheduler))
+ final def publishOn(scheduler: Scheduler): Mono[T] = Mono.from(new ReactiveSMono[T](jMono).publishOn(scheduler))
/**
* Repeatedly subscribe to the source completion of the previous subscription.
@@ -1219,7 +1184,7 @@ class Mono[T] private(private val jMono: JMono[T])
*
* @return an indefinitely repeated [[Flux]] on onComplete
*/
- final def repeat() = Flux(jMono.repeat())
+ final def repeat(): Flux[T] = Flux.from(new ReactiveSMono[T](jMono).repeat())
/**
* Repeatedly subscribe to the source if the predicate returns true after completion of the previous subscription.
@@ -1231,7 +1196,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @return an eventually repeated [[Flux]] on onComplete
*
*/
- final def repeat(predicate: () => Boolean) = Flux(jMono.repeat(predicate))
+ final def repeat(predicate: () => Boolean): Flux[T] = Flux.from(new ReactiveSMono[T](jMono).repeat(predicate = predicate))
/**
* Repeatedly subscribe to the source if the predicate returns true after completion of the previous subscription.
@@ -1243,7 +1208,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @return an eventually repeated [[Flux]] on onComplete up to number of repeat specified
*
*/
- final def repeat(numRepeat: Long) = Flux(jMono.repeat(numRepeat))
+ final def repeat(numRepeat: Long): Flux[T] = Flux.from(new ReactiveSMono[T](jMono).repeat(numRepeat))
/**
* Repeatedly subscribe to the source if the predicate returns true after completion of the previous
@@ -1258,7 +1223,7 @@ class Mono[T] private(private val jMono: JMono[T])
* predicate
*
*/
- final def repeat(numRepeat: Long, predicate: () => Boolean) = Flux(jMono.repeat(numRepeat, predicate))
+ final def repeat(numRepeat: Long, predicate: () => Boolean): Flux[T] = Flux.from(new ReactiveSMono[T](jMono).repeat(numRepeat, predicate))
private implicit def fluxLong2PublisherAnyToJFluxJLong2PublisherAny(mapper: (Flux[Long] => Publisher[_])): Function[JFlux[JLong], Publisher[_]] = {
new Function[JFlux[JLong], Publisher[_]] {
@@ -1282,8 +1247,10 @@ class Mono[T] private(private val jMono: JMono[T])
* onNext signal
*
*/
- // TODO: How to test this?
- final def repeatWhen(whenFactory: Flux[Long] => _ <: Publisher[_]) = Flux(jMono.repeatWhen(whenFactory))
+ final def repeatWhen(whenFactory: Flux[Long] => _ <: Publisher[_]): Flux[T] = {
+ def whenF(sFlux: SFlux[Long]): Publisher[_] = whenFactory(Flux.from[Long](sFlux))
+ Flux.from(new ReactiveSMono[T](jMono).repeatWhen(whenF))
+ }
/**
* Repeatedly subscribe to this [[Mono]] until there is an onNext signal when a companion sequence signals a
@@ -1300,8 +1267,10 @@ class Mono[T] private(private val jMono: JMono[T])
* onNext signal
*
*/
- // TODO: How to test this?
- final def repeatWhenEmpty(repeatFactory: Flux[Long] => Publisher[_]): Mono[T] = Mono[T](jMono.repeatWhenEmpty(repeatFactory))
+ final def repeatWhenEmpty(repeatFactory: Flux[Long] => Publisher[_]): Mono[T] = {
+ def repeatF(f: SFlux[Long]): Publisher[_] = repeatFactory(Flux.from(f))
+ Mono.from(new ReactiveSMono[T](jMono).repeatWhenEmpty(repeatF))
+ }
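An illustrative sketch of `repeatWhenEmpty` as rewired above: re-subscribe to an empty source a bounded number of times by limiting the companion Flux. `Mono.empty` and `Flux.take` are assumed to behave like their Reactor core counterparts:

```scala
import reactor.core.scala.publisher.{Flux, Mono}

// Hypothetical poll that may complete empty until data becomes available.
def pollOnce(): Mono[String] = Mono.empty[String]

// Give up after three empty completions by bounding the companion sequence.
val polled: Mono[String] =
  pollOnce().repeatWhenEmpty((emissions: Flux[Long]) => emissions.take(3))
```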
/**
* Repeatedly subscribe to this [[Mono]] until there is an onNext signal when a companion sequence signals a
@@ -1321,7 +1290,10 @@ class Mono[T] private(private val jMono: JMono[T])
*
*/
// TODO: How to test this?
- final def repeatWhenEmpty(maxRepeat: Int, repeatFactory: Flux[Long] => Publisher[_]): Mono[T] = Mono[T](jMono.repeatWhenEmpty(maxRepeat, repeatFactory))
+ final def repeatWhenEmpty(maxRepeat: Int, repeatFactory: Flux[Long] => Publisher[_]): Mono[T] = {
+ def repeatF(f: SFlux[Long]): Publisher[_] = repeatFactory(Flux.from(f))
+ Mono.from(new ReactiveSMono[T](jMono).repeatWhenEmpty(repeatF, maxRepeat))
+ }
/**
* Re-subscribes to this [[Mono]] sequence if it signals any error
@@ -1335,7 +1307,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @return a re-subscribing [[Mono]] on onError
*/
// TODO: How to test these retry(...)
- final def retry(): Mono[T] = Mono[T](jMono.retry())
+ final def retry(): Mono[T] = Mono.from(new ReactiveSMono[T](jMono).retry())
/**
* Re-subscribes to this [[Mono]] sequence if it signals any error
@@ -1350,7 +1322,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @return a re-subscribing [[Mono]] on onError up to the specified number of retries.
*
*/
- final def retry(numRetries: Long): Mono[T] = Mono[T](jMono.retry(numRetries))
+ final def retry(numRetries: Long): Mono[T] = Mono.from(new ReactiveSMono[T](jMono).retry(numRetries))
/**
* Re-subscribes to this [[Mono]] sequence if it signals any error
@@ -1362,7 +1334,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @param retryMatcher the predicate to evaluate if retry should occur based on a given error signal
* @return a re-subscribing [[Mono]] on onError if the predicates matches.
*/
- final def retry(retryMatcher: Throwable => Boolean): Mono[T] = Mono[T](jMono.retry(retryMatcher))
+ final def retry(retryMatcher: Throwable => Boolean): Mono[T] = Mono.from(new ReactiveSMono[T](jMono).retry(retryMatcher = retryMatcher))
/**
* Re-subscribes to this [[Mono]] sequence up to the specified number of retries if it signals any
@@ -1377,7 +1349,7 @@ class Mono[T] private(private val jMono: JMono[T])
* matches.
*
*/
- final def retry(numRetries: Long, retryMatcher: Throwable => Boolean): Mono[T] = Mono[T](jMono.retry(numRetries, retryMatcher))
+ final def retry(numRetries: Long, retryMatcher: Throwable => Boolean): Mono[T] = Mono.from(new ReactiveSMono[T](jMono).retry(numRetries, retryMatcher))
/**
* Retries this [[Mono]] when a companion sequence signals
@@ -1393,7 +1365,10 @@ class Mono[T] private(private val jMono: JMono[T])
* @return a re-subscribing [[Mono]] on onError when the companion [[Publisher]] produces an
* onNext signal
*/
- final def retryWhen(whenFactory: Flux[Throwable] => Publisher[_]): Mono[T] = Mono[T](jMono.retryWhen(whenFactory))
+ final def retryWhen(whenFactory: Flux[Throwable] => Publisher[_]): Mono[T] = {
+ def whenF(f: SFlux[Throwable]): Publisher[_] = whenFactory(Flux.from(f))
+ Mono.from(new ReactiveSMono[T](jMono).retryWhen(whenF))
+ }
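A sketch of `retryWhen` as delegated above: re-subscribe on error at most a fixed number of times by bounding the companion error Flux (the flaky call is a placeholder):

```scala
import reactor.core.scala.publisher.{Flux, Mono}

// Stand-in for an unreliable remote call.
def flakyCall(): Mono[String] = Mono.just("ok")

// Each error is re-emitted by the companion, triggering a retry; after three
// errors the companion completes and the retry loop stops.
val resilient: Mono[String] =
  flakyCall().retryWhen((errors: Flux[Throwable]) => errors.take(3))
```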
/**
* Expect exactly one item from this [[Mono]] source or signal
@@ -1406,7 +1381,7 @@ class Mono[T] private(private val jMono: JMono[T])
*
* @return a [[Mono]] with the single item or an error signal
*/
- final def single() = Mono(jMono.single())
+ final def single(): Mono[T] = Mono.from(new ReactiveSMono[T](jMono).single())
/**
* Subscribe to this [[Mono]] and request unbounded demand.
@@ -1420,7 +1395,7 @@ class Mono[T] private(private val jMono: JMono[T])
*
* @return a new [[Disposable]] that can be used to cancel the underlying [[Subscription]]
*/
- final def subscribe(): Disposable = jMono.subscribe()
+ final def subscribe(): Disposable = new ReactiveSMono[T](jMono).subscribe()
/**
* Subscribe a [[Consumer]] to this [[Mono]] that will consume all the
@@ -1435,7 +1410,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @param consumer the consumer to invoke on each value
* @return a new [[Runnable]] to dispose the [[Subscription]]
*/
- final def subscribe(consumer: T => Unit): Disposable = jMono.subscribe(consumer)
+ final def subscribe(consumer: T => Unit): Disposable = new ReactiveSMono[T](jMono).subscribe(consumer)
/**
* Subscribe [[scala.Function1[T,Unit] Consumer]] to this [[Mono]] that will consume all the
@@ -1451,7 +1426,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @param errorConsumer the consumer to invoke on error signal
* @return a new [[Runnable]] to dispose the [[org.reactivestreams.Subscription]]
*/
- final def subscribe(consumer: T => Unit, errorConsumer: Throwable => Unit): Disposable = jMono.subscribe(consumer, errorConsumer)
+ final def subscribe(consumer: T => Unit, errorConsumer: Throwable => Unit): Disposable = new ReactiveSMono[T](jMono).subscribe(consumer, errorConsumer)
/**
* Subscribe `consumer` to this [[Mono]] that will consume all the
@@ -1468,7 +1443,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @param completeConsumer the consumer to invoke on complete signal
* @return a new [[Disposable]] to dispose the [[Subscription]]
*/
- final def subscribe(consumer: T => Unit, errorConsumer: Throwable => Unit, completeConsumer: => Unit): Disposable = jMono.subscribe(consumer, errorConsumer, completeConsumer)
+ final def subscribe(consumer: T => Unit, errorConsumer: Throwable => Unit, completeConsumer: => Unit): Disposable = new ReactiveSMono[T](jMono).subscribe(consumer, errorConsumer, completeConsumer)
/**
* Subscribe [[Consumer]] to this [[Mono]] that will consume all the
@@ -1487,7 +1462,7 @@ class Mono[T] private(private val jMono: JMono[T])
* for the initial [[Subscription.request request]], or null for max request
* @return a new [[Disposable]] to dispose the [[Subscription]]
*/
- final def subscribe(consumer: T => Unit, errorConsumer: Throwable => Unit, completeConsumer: => Unit, subscriptionConsumer: Subscription => Unit): Disposable = jMono.subscribe(consumer, errorConsumer, completeConsumer, subscriptionConsumer)
+ final def subscribe(consumer: T => Unit, errorConsumer: Throwable => Unit, completeConsumer: => Unit, subscriptionConsumer: Subscription => Unit): Disposable = new ReactiveSMono[T](jMono).subscribe(consumer, errorConsumer, completeConsumer, subscriptionConsumer)
/**
* Enrich a potentially empty downstream [[Context]] by adding all values
@@ -1508,7 +1483,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @return a contextualized [[Mono]]
* @see [[Context]]
*/
- final def subscriberContext(mergeContext: Context): Mono[T] = Mono[T](jMono.subscriberContext(mergeContext))
+ final def subscriberContext(mergeContext: Context): Mono[T] = Mono.from(new ReactiveSMono[T](jMono).subscriberContext(mergeContext))
/**
* Enrich a potentially empty downstream [[Context]] by applying a [[Function1]]
@@ -1528,7 +1503,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @return a contextualized [[Mono]]
* @see [[Context]]
*/
- final def subscriberContext(doOnContext: Context => Context): Mono[T] = Mono[T](jMono.subscriberContext(doOnContext))
+ final def subscriberContext(doOnContext: Context => Context): Mono[T] = Mono.from(new ReactiveSMono[T](jMono).subscriberContext(doOnContext))
/**
* Run the requests to this Publisher [[Mono]] on a given worker assigned by the supplied [[Scheduler]].
@@ -1542,8 +1517,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @param scheduler a checked [[reactor.core.scheduler.Scheduler.Worker]] factory
* @return an asynchronously requesting [[Mono]]
*/
- // TODO: How to test this?
- final def subscribeOn(scheduler: Scheduler): Mono[T] = Mono[T](jMono.subscribeOn(scheduler))
+ final def subscribeOn(scheduler: Scheduler): Mono[T] = Mono.from(new ReactiveSMono[T](jMono).subscribeOn(scheduler))
/**
* Subscribe the [[Mono]] with the given [[Subscriber]] and return it.
@@ -1553,7 +1527,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @return the passed [[Subscriber]] after subscribing it to this { @link Mono}
*/
// TODO: How to test this?
- final def subscribeWith[E <: Subscriber[_ >: T]](subscriber: E): E = jMono.subscribeWith(subscriber)
+ final def subscribeWith[E <: Subscriber[_ >: T]](subscriber: E): E = new ReactiveSMono[T](jMono).subscribeWith(subscriber)
/**
* Provide an alternative [[Mono]] if this mono is completed without data
@@ -1566,7 +1540,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @return an alternating [[Mono]] on source onComplete without elements
* @see [[Flux.switchIfEmpty]]
*/
- final def switchIfEmpty(alternate: Mono[_ <: T]): Mono[T] = Mono[T](jMono.switchIfEmpty(alternate.jMono))
+ final def switchIfEmpty(alternate: Mono[_ <: T]): Mono[T] = Mono.from(new ReactiveSMono[T](jMono).switchIfEmpty(alternate.jMono))
/**
* Tag this mono with a key/value pair. These can be retrieved as a [[Stream]] of
@@ -1578,7 +1552,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @param value a tag value
* @return the same sequence, but bearing tags
*/
- final def tag(key: String, value: String) = Mono(jMono.tag(key, value))
+ final def tag(key: String, value: String): Mono[T] = Mono.from(new ReactiveSMono[T](jMono).tag(key, value))
/**
* Give this Mono a chance to resolve within a specified time frame but complete if it
@@ -1591,7 +1565,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @return a new [[Mono]] that will propagate the signals from the source unless
* no signal is received for `duration`, in which case it completes.
*/
- final def take(duration: Duration) = Mono(jMono.take(duration))
+ final def take(duration: Duration): Mono[T] = Mono.from(new ReactiveSMono[T](jMono).take(duration))
/**
* Give this Mono a chance to resolve within a specified time frame but complete if it
@@ -1605,7 +1579,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @return a new [[Mono]] that will propagate the signals from the source unless
* no signal is received for `duration`, in which case it completes.
*/
- final def take(duration: Duration, timer: Scheduler) = Mono(jMono.take(duration, timer))
+ final def take(duration: Duration, timer: Scheduler): Mono[T] = Mono.from(new ReactiveSMono[T](jMono).take(duration, timer))
/**
* Give this Mono a chance to resolve before a companion [[Publisher]] emits. If
@@ -1618,7 +1592,7 @@ class Mono[T] private(private val jMono: JMono[T])
* a signal is first received from the companion [[Publisher]], in which case it
* completes.
*/
- final def takeUntilOther(other: Publisher[_]) = Mono(jMono.takeUntilOther(other))
+ final def takeUntilOther(other: Publisher[_]): Mono[T] = Mono.from(new ReactiveSMono[T](jMono).takeUntilOther(other))
implicit def jMonoVoid2jMonoUnit(jMonoVoid: JMono[Void]): JMono[Unit] = jMonoVoid.map((_: Void) => ())
@@ -1632,7 +1606,7 @@ class Mono[T] private(private val jMono: JMono[T])
*
* @return a [[Mono]] ignoring its payload (actively dropping)
*/
- final def `then`(): Mono[Unit] = Mono[Unit](jMono.`then`())
+ final def `then`(): Mono[Unit] = Mono.from(new ReactiveSMono[T](jMono).`then`())
/**
* Ignore element from this [[Mono]] and transform its completion signal into the
@@ -1646,7 +1620,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @tparam V the element type of the supplied Mono
* @return a new [[Mono]] that emits from the supplied [[Mono]]
*/
- final def `then`[V](other: Mono[V]): Mono[V] = Mono[V](jMono.`then`(other))
+ final def `then`[V](other: Mono[V]): Mono[V] = Mono.from(new ReactiveSMono[T](jMono).`then`(new ReactiveSMono[V](other.jMono)))
/**
* Return a `Mono[Unit]` that waits for this [[Mono]] to complete then
@@ -1660,7 +1634,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @return a new [[Mono]] completing when both publishers have completed in
* sequence
*/
- final def thenEmpty(other: Publisher[Unit]): Mono[Unit] = Mono[Unit]((jMono: JMono[T]).thenEmpty(other))
+ final def thenEmpty(other: MapablePublisher[Unit]): Mono[Unit] = Mono.from(new ReactiveSMono[T](jMono).thenEmpty(other))
/**
* Ignore element from this mono and transform the completion signal into a
@@ -1674,7 +1648,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @return a new [[Flux]] that emits from the supplied [[Publisher]] after
* this Mono completes.
*/
- final def thenMany[V](other: Publisher[V]): Flux[V] = Flux(jMono.thenMany(other))
+ final def thenMany[V](other: Publisher[V]): Flux[V] = Flux.from(new ReactiveSMono[T](jMono).thenMany(other))
/**
* Signal a [[java.util.concurrent.TimeoutException]] in case an item doesn't arrive before the given period.
@@ -1685,7 +1659,12 @@ class Mono[T] private(private val jMono: JMono[T])
* @param timeout the timeout before the onNext signal from this [[Mono]]
* @return an expirable [[Mono]]}
*/
- final def timeout(timeout: Duration) = Mono(jMono.timeout(timeout))
+ final def timeout(timeout: Duration): Mono[T] = Mono.from(new ReactiveSMono[T](jMono).timeout(timeout))
+
+ private def optionMonoExtendT2OptionSMonoT[T](f: Option[Mono[_ <: T]]): Option[SMono[T]] = f map {m: Mono[_ <: T] => {
+ val mt: JMono[T] = m.as(Mono.from[T]).jMono
+ new ReactiveSMono[T](mt)
+ }}
/**
* Switch to a fallback [[Mono]] in case an item doesn't arrive before the given period.
@@ -1699,7 +1678,9 @@ class Mono[T] private(private val jMono: JMono[T])
* @param fallback the fallback [[Mono]] to subscribe when a timeout occurs
* @return an expirable [[Mono]] with a fallback [[Mono]]
*/
- final def timeout(timeout: Duration, fallback: Option[Mono[_ <: T]]) = Mono[T](jMono.timeout(timeout, fallback.orNull))
+ final def timeout(timeout: Duration, fallback: Option[Mono[_ <: T]]): Mono[T] = {
+ Mono.from(new ReactiveSMono[T](jMono).timeout(timeout, optionMonoExtendT2OptionSMonoT[T](fallback)))
+ }
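A usage sketch of the Option-based `timeout` fallback above; the 100 ms budget and the cached fallback are illustrative values only:

```scala
import scala.concurrent.duration._

import reactor.core.scala.publisher.Mono

def primaryLookup(): Mono[String] = Mono.just("fresh") // stand-in for a slow upstream
val cached: Mono[String] = Mono.just("cached")

// Fall back to the cached value if the primary does not resolve within 100 ms.
val resolved: Mono[String] = primaryLookup().timeout(100.milliseconds, Option(cached))
```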
/**
* Signal a [[java.util.concurrent.TimeoutException]] error in case an item doesn't arrive before the given period.
@@ -1711,7 +1692,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @param timer a time-capable [[Scheduler]] instance to run on
* @return an expirable [[Mono]]
*/
- final def timeout(timeout: Duration, timer: Scheduler): Mono[T] = Mono[T](jMono.timeout(timeout, timer))
+ final def timeout(timeout: Duration, timer: Scheduler): Mono[T] = Mono.from(new ReactiveSMono[T](jMono).timeout(timeout, timer = timer))
/**
* Switch to a fallback [[Mono]] in case an item doesn't arrive before the given period.
@@ -1726,7 +1707,9 @@ class Mono[T] private(private val jMono: JMono[T])
* @param timer a time-capable [[Scheduler]] instance to run on
* @return an expirable [[Mono]] with a fallback [[Mono]]
*/
- final def timeout(timeout: Duration, fallback: Option[Mono[_ <: T]], timer: Scheduler): Mono[T] = Mono[T](jMono.timeout(timeout, fallback.orNull[Mono[_ <: T]], timer))
+ final def timeout(timeout: Duration, fallback: Option[Mono[_ <: T]], timer: Scheduler): Mono[T] = {
+ Mono.from(new ReactiveSMono[T](jMono).timeout(timeout, optionMonoExtendT2OptionSMonoT[T](fallback), timer))
+ }
/**
@@ -1739,9 +1722,9 @@ class Mono[T] private(private val jMono: JMono[T])
* @param firstTimeout the timeout [[Publisher]] that must not emit before the first signal from this [[Mono]]
* @tparam U the element type of the timeout Publisher
* @return an expirable [[Mono]] if the first item does not come before a [[Publisher]] signal
- *
+ * @see [[SMono.timeoutWhen]]
*/
- final def timeout[U](firstTimeout: Publisher[U]) = Mono[T](jMono.timeout(firstTimeout))
+ final def timeout[U](firstTimeout: Publisher[U]): Mono[T] = Mono.from(new ReactiveSMono[T](jMono).timeoutWhen(firstTimeout))
/**
* Switch to a fallback [[Publisher]] in case the item from this [[Mono]] has
@@ -1758,7 +1741,9 @@ class Mono[T] private(private val jMono: JMono[T])
* @return a first then per-item expirable [[Mono]] with a fallback [[Publisher]]
*
*/
- final def timeout[U](firstTimeout: Publisher[U], fallback: Mono[_ <: T]) = Mono[T](jMono.timeout(firstTimeout, fallback))
+ final def timeout[U](firstTimeout: Publisher[U], fallback: Mono[_ <: T]): Mono[T] = {
+ Mono.from(new ReactiveSMono[T](jMono).timeoutWhen(firstTimeout, optionMonoExtendT2OptionSMonoT[T](Option(fallback))))
+ }
/**
* Emit a [[Tuple2]] pair of T1 [[Long]] current system time in
@@ -1770,7 +1755,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @return a timestamped [[Mono]]
*/
// TODO: How to test timestamp(...) with the actual timestamp?
- final def timestamp() = new Mono[(Long, T)](jMono.timestamp().map((t2: Tuple2[JLong, T]) => (Long2long(t2.getT1), t2.getT2)))
+ final def timestamp(): Mono[(Long, T)] = Mono.from(new ReactiveSMono[T](jMono).timestamp())
/**
* Emit a [[Tuple2]] pair of T1 [[Long]] current system time in
@@ -1782,7 +1767,7 @@ class Mono[T] private(private val jMono: JMono[T])
* @param scheduler a [[Scheduler]] instance to read time from
* @return a timestamped [[Mono]]
*/
- final def timestamp(scheduler: Scheduler): Mono[(Long, T)] = Mono[(Long, T)](jMono.timestamp(scheduler).map((t2: Tuple2[JLong, T]) => (Long2long(t2.getT1), t2.getT2)))
+ final def timestamp(scheduler: Scheduler): Mono[(Long, T)] = Mono.from(new ReactiveSMono[T](jMono).timestamp(scheduler))
/**
* Transform this [[Mono]] into a [[Future]] completing on onNext or onComplete and failing on
@@ -1794,15 +1779,7 @@ class Mono[T] private(private val jMono: JMono[T])
*
* @return a [[Future]]
*/
- final def toFuture: Future[T] = {
- val promise = Promise[T]()
- jMono.toFuture.handle[Unit]((value: T, throwable: Throwable) => {
- Option(value).foreach(v => promise.complete(Try(v)))
- Option(throwable).foreach(t => promise.failure(t))
- ()
- })
- promise.future
- }
+ final def toFuture: Future[T] = new ReactiveSMono[T](jMono).toFuture
/**
* Transform this [[Mono]] in order to generate a target [[Mono]]. Unlike [[Mono.compose]], the
@@ -1819,9 +1796,14 @@ class Mono[T] private(private val jMono: JMono[T])
* @see [[Mono.compose]] for deferred composition of [[Mono]] for each [[Subscriber]]
* @see [[Mono.as]] for a loose conversion to an arbitrary type
*/
- final def transform[V](transformer: Mono[T] => Publisher[V]): Mono[V] = Mono[V](jMono.transform[V]((_: JMono[T]) => transformer(Mono.this)))
+ final def transform[V](transformer: Mono[T] => Publisher[V]): Mono[V] = {
- final def asJava(): JMono[T] = jMono
+ def transformFunction(sMono: SMono[T]): Publisher[V] = transformer(Mono.from[T](sMono))
+
+ Mono.from(new ReactiveSMono[T](jMono).transform[V](transformFunction))
+ }
+
+ final def asJava(): JMono[T] = new ReactiveSMono[T](jMono).asJava()
}
object Mono {
@@ -1832,23 +1814,16 @@ object Mono {
* @param javaMono The underlying Java Mono
* @tparam T The value type that will be emitted by this mono
* @return Wrapper of Java Mono
+ * @deprecated
*/
def apply[T](javaMono: JMono[T]): Mono[T] = new Mono[T](javaMono)
- def create[T](callback: MonoSink[T] => Unit): Mono[T] = {
- new Mono[T](
- JMono.create(new Consumer[MonoSink[T]] {
- override def accept(t: MonoSink[T]): Unit = callback(t)
- })
- )
- }
+ def create[T](callback: MonoSink[T] => Unit): Mono[T] = Mono.from(SMono.create(callback))
def defer[T](supplier: () => Mono[T]): Mono[T] = {
- new Mono[T](
- JMono.defer(new Supplier[JMono[T]] {
- override def get(): JMono[T] = supplier().jMono
- })
- )
+ def supplierF(): SMono[T] = new ReactiveSMono[T](supplier().jMono)
+
+ Mono.from(SMono.defer[T](() => supplierF()))
}
/**
@@ -1915,7 +1890,10 @@ object Mono {
* @tparam T The type of the function result.
* @return a [[Mono]].
*/
- def first[T](monos: Mono[_ <: T]*): Mono[T] = Mono[T](JMono.first[T](monos.map(_.jMono): _*))
+ def first[T](monos: Mono[_ <: T]*): Mono[T] = {
+ val sMonos: Seq[SMono[T]] = monos.map((m: Mono[_]) => new ReactiveSMono[T](m.asJava().asInstanceOf[Publisher[T]]))
+ Mono.from(SMono.firstEmitter[T](sMonos: _*))
+ }
/**
* Pick the first result coming from any of the given monos and populate a new `Mono`.
@@ -1928,7 +1906,10 @@ object Mono {
* @tparam T The type of the function result.
* @return a [[Mono]].
*/
- def first[T](monos: Iterable[_ <: Mono[_ <: T]]): Mono[T] = Mono[T](JMono.first[T](monos.map(_.asJava()).asJava))
+ def first[T](monos: Iterable[_ <: Mono[_ <: T]]): Mono[T] = {
+ val sMonos: Seq[SMono[T]] = monos.map((m: Mono[_]) => new ReactiveSMono[T](m.asJava().asInstanceOf[Publisher[T]])).toSeq
+ Mono.from(SMono.firstEmitter[T](sMonos: _*))
+ }
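A sketch of the `Mono.first` factories above, racing two placeholder sources and keeping whichever signals first:

```scala
import reactor.core.scala.publisher.Mono

val fromCache: Mono[String] = Mono.just("cached-value")
val fromDb: Mono[String] = Mono.just("db-value")

// Whichever mono signals first (value, empty completion or error) wins the race.
val fastest: Mono[String] = Mono.first(fromCache, fromDb)
```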
/**
* Expose the specified [[Publisher]] with the [[Mono]] API, and ensure it will emit 0 or 1 item.
diff --git a/src/main/scala/reactor/core/scala/publisher/PimpMyPublisher.scala b/src/main/scala/reactor/core/scala/publisher/PimpMyPublisher.scala
index 10b22af0..00b95819 100644
--- a/src/main/scala/reactor/core/scala/publisher/PimpMyPublisher.scala
+++ b/src/main/scala/reactor/core/scala/publisher/PimpMyPublisher.scala
@@ -1,11 +1,9 @@
package reactor.core.scala.publisher
-import reactor.core.publisher.{Flux => JFlux, Mono => JMono}
import java.lang.{Long => JLong}
-import java.util
-import scala.collection.JavaConverters._
-import scala.collection.mutable
+import reactor.core.publisher.{Flux => JFlux, Mono => JMono}
+
import scala.language.implicitConversions
/**
@@ -25,4 +23,9 @@ object PimpMyPublisher {
implicit def jFluxJInt2JFluxInt(jFluxInt: JFlux[Integer]): JFlux[Int] = jFluxInt.map[Int]((i: Integer) => Integer2int(i))
+ implicit def jMonoJLong2JMonoLong(mono: JMono[JLong]): JMono[Long] = mono.map(Long2long(_: JLong))
+
+ implicit def jFlux2SFlux[T](jFlux: JFlux[T]): SFlux[T] = new ReactiveSFlux[T](jFlux)
+
+ implicit def jMono2SMono[T](jMono: JMono[T]): SMono[T] = new ReactiveSMono[T](jMono)
}
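Below is a hedged sketch of how the new implicit conversions in `PimpMyPublisher` might be used at a call site, assuming they are brought into scope with a wildcard import:

```scala
import reactor.core.publisher.{Mono => JMono}
import reactor.core.scala.publisher.PimpMyPublisher._
import reactor.core.scala.publisher.SMono

// A Reactor (Java) Mono is assigned where an SMono is expected; the
// jMono2SMono implicit conversion declared above performs the wrapping.
val javaMono: JMono[String] = JMono.just("hello")
val scalaMono: SMono[String] = javaMono
```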
diff --git a/src/main/scala/reactor/core/scala/publisher/SFlux.scala b/src/main/scala/reactor/core/scala/publisher/SFlux.scala
new file mode 100644
index 00000000..62cf6648
--- /dev/null
+++ b/src/main/scala/reactor/core/scala/publisher/SFlux.scala
@@ -0,0 +1,517 @@
+package reactor.core.scala.publisher
+
+import java.lang.{Boolean => JBoolean, Iterable => JIterable, Long => JLong}
+import java.util
+import java.util.concurrent.Callable
+import java.util.function.{BiFunction, Function, Supplier}
+import java.util.logging.Level
+import java.util.{Collection => JCollection, List => JList, Map => JMap}
+
+import org.reactivestreams.{Publisher, Subscriber}
+import reactor.core.publisher.FluxSink.OverflowStrategy
+import reactor.core.publisher.{BufferOverflowStrategy, FluxSink, Signal, SignalType, SynchronousSink, Flux => JFlux, GroupedFlux => JGroupedFlux}
+import reactor.core.scala.Scannable
+import reactor.core.scala.publisher.PimpMyPublisher._
+import reactor.core.scheduler.{Scheduler, Schedulers}
+import reactor.core.{Disposable, publisher, Scannable => JScannable}
+import reactor.util.Logger
+import reactor.util.concurrent.Queues.{SMALL_BUFFER_SIZE, XS_BUFFER_SIZE}
+import reactor.util.function.{Tuple2, Tuple3, Tuple4, Tuple5, Tuple6}
+
+import scala.collection.JavaConverters._
+import scala.collection.mutable
+import scala.concurrent.duration.Duration
+import scala.concurrent.duration.Duration.Infinite
+import scala.language.postfixOps
+import scala.reflect.ClassTag
+
+trait SFlux[T] extends SFluxLike[T, SFlux] with MapablePublisher[T] {
+ self =>
+
+ final def all(predicate: T => Boolean): SMono[Boolean] = new ReactiveSMono[Boolean](coreFlux.all(predicate).map((b: JBoolean) => Boolean2boolean(b)))
+
+ final def any(predicate: T => Boolean): SMono[Boolean] = new ReactiveSMono[Boolean](coreFlux.any(predicate).map((b: JBoolean) => Boolean2boolean(b)))
+
+ final def as[P](transformer: SFlux[T] => P): P = {
+ coreFlux.as[P](new Function[JFlux[T], P] {
+ override def apply(t: JFlux[T]): P = transformer(t)
+ })
+ }
+
+ final def asJava(): JFlux[T] = coreFlux
+
+ final def blockFirst(timeout: Duration = Duration.Inf): Option[T] = timeout match {
+ case _: Infinite => Option(coreFlux.blockFirst())
+ case t => Option(coreFlux.blockFirst(t))
+ }
+
+ final def blockLast(timeout: Duration = Duration.Inf): Option[T] = timeout match {
+ case _: Infinite => Option(coreFlux.blockLast())
+ case t => Option(coreFlux.blockLast(t))
+ }
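Both blocking variants default to an unbounded wait and return an Option, so an empty source yields None instead of null; an illustrative sketch:

    import scala.concurrent.duration._

    SFlux.just(1, 2, 3).blockFirst()           // Some(1)
    SFlux.just(1, 2, 3).blockLast(5 seconds)   // Some(3), bounded wait
    SFlux.empty[Int].blockFirst()              // None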
+
+ final def buffer[C >: mutable.Buffer[T]](maxSize: Int = Int.MaxValue, bufferSupplier: () => C = () => mutable.ListBuffer.empty[T]): SFlux[Seq[T]] = {
+ new ReactiveSFlux[Seq[T]](coreFlux.buffer(maxSize, new Supplier[JList[T]] {
+ override def get(): JList[T] = {
+ bufferSupplier().asInstanceOf[mutable.Buffer[T]].asJava
+ }
+ }).map((l: JList[T]) => l.asScala))
+ }
+
+ final def bufferTimeSpan(timespan: Duration, timer: Scheduler = Schedulers.parallel())(timeshift: Duration = timespan): SFlux[Seq[T]] =
+ new ReactiveSFlux[Seq[T]](coreFlux.buffer(timespan, timeshift, timer).map((l: JList[T]) => l.asScala))
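The curried second parameter list lets the time shift default to the time span; for example (sketch only):

    import scala.concurrent.duration._

    val ticks: SFlux[Long] = SFlux.interval(100 milliseconds)

    ticks.bufferTimeSpan(2 seconds)()          // shift defaults to the span: adjacent buffers
    ticks.bufferTimeSpan(2 seconds)(1 second)  // a new 2-second buffer opens every second (overlapping)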
+
+ final def bufferPublisher(other: Publisher[_]): SFlux[Seq[T]] = new ReactiveSFlux[Seq[T]](coreFlux.buffer(other).map((l: JList[T]) => l.asScala))
+
+ final def bufferTimeout[C >: mutable.Buffer[T]](maxSize: Int, timespan: Duration, timer: Scheduler = Schedulers.parallel(), bufferSupplier: () => C = () => mutable.ListBuffer.empty[T]): SFlux[Seq[T]] = {
+ new ReactiveSFlux[Seq[T]](coreFlux.bufferTimeout(maxSize, timespan, timer, new Supplier[JList[T]] {
+ override def get(): JList[T] = {
+ bufferSupplier().asInstanceOf[mutable.Buffer[T]].asJava
+ }
+ }).map((l: JList[T]) => l.asScala))
+ }
+
+ final def bufferUntil(predicate: T => Boolean, cutBefore: Boolean = false): SFlux[Seq[T]] = new ReactiveSFlux[Seq[T]](coreFlux.bufferUntil(predicate, cutBefore).map((l: JList[T]) => l.asScala))
+
+ final def bufferWhen[U, V, C >: mutable.Buffer[T]](bucketOpening: Publisher[U], closeSelector: U => Publisher[V], bufferSupplier: () => C = () => mutable.ListBuffer.empty[T]): SFlux[Seq[T]] =
+ new ReactiveSFlux[Seq[T]](coreFlux.bufferWhen(bucketOpening, closeSelector, new Supplier[JList[T]] {
+ override def get(): JList[T] = {
+ bufferSupplier().asInstanceOf[mutable.Buffer[T]].asJava
+ }
+ }).map((l: JList[T]) => l.asScala))
+
+ final def bufferWhile(predicate: T => Boolean): SFlux[Seq[T]] = new ReactiveSFlux[Seq[T]](coreFlux.bufferWhile(predicate).map((l: JList[T]) => l.asScala))
+
+ final def cache(history: Int = Int.MaxValue, ttl: Duration = Duration.Inf): SFlux[T] = {
+ ttl match {
+ case _: Duration.Infinite => new ReactiveSFlux[T](coreFlux.cache(history))
+ case _ => new ReactiveSFlux[T](coreFlux.cache(history, ttl))
+ }
+ }
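Both parameters default to unbounded values, so the plain call replays everything; a usage sketch:

    import scala.concurrent.duration._

    val all      = SFlux.just(1, 2, 3).cache()                  // replay every signal to late subscribers
    val lastOnly = SFlux.just(1, 2, 3).cache(history = 1)       // replay only the latest element
    val expiring = SFlux.just(1, 2, 3).cache(ttl = 5 seconds)   // replayed signals expire after the ttl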
+
+ final def cast[E](implicit classTag: ClassTag[E]): SFlux[E] = new ReactiveSFlux[E](coreFlux.cast(classTag.runtimeClass.asInstanceOf[Class[E]]))
+
+ final def collectSeq(): SMono[Seq[T]] = new ReactiveSMono[Seq[T]](coreFlux.collectList().map((l: JList[T]) => l.asScala))
+
+ final def collectMap[K](keyExtractor: T => K): SMono[Map[K, T]] = collectMap[K, T](keyExtractor, (t: T) => t)
+
+ final def collectMap[K, V](keyExtractor: T => K, valueExtractor: T => V, mapSupplier: () => mutable.Map[K, V] = () => mutable.HashMap.empty[K, V]): SMono[Map[K, V]] =
+ new ReactiveSMono[Map[K, V]](coreFlux.collectMap[K, V](keyExtractor, valueExtractor, new Supplier[JMap[K, V]] {
+ override def get(): JMap[K, V] = mapSupplier().asJava
+ }).map((m: JMap[K, V]) => m.asScala.toMap))
+
+ final def collectMultimap[K](keyExtractor: T => K): SMono[Map[K, Traversable[T]]] = collectMultimap(keyExtractor, (t: T) => t)
+
+ final def collectMultimap[K, V](keyExtractor: T => K, valueExtractor: T => V, mapSupplier: () => mutable.Map[K, util.Collection[V]] = () => mutable.HashMap.empty[K, util.Collection[V]]): SMono[Map[K, Traversable[V]]] =
+ new ReactiveSMono[Map[K, Traversable[V]]](coreFlux.collectMultimap[K, V](keyExtractor, valueExtractor,
+ new Supplier[util.Map[K, util.Collection[V]]] {
+ override def get(): util.Map[K, util.Collection[V]] = {
+ mapSupplier().asJava
+ }
+ }).map((m: JMap[K, JCollection[V]]) => m.asScala.toMap.mapValues((vs: JCollection[V]) => vs.asScala.toSeq)))
+
+ final def collectSortedSeq(ordering: Ordering[T] = None.orNull): SMono[Seq[T]] = new ReactiveSMono[Seq[T]](coreFlux.collectSortedList(ordering).map((l: JList[T]) => l.asScala))
+
+ final def compose[V](transformer: Flux[T] => Publisher[V]): SFlux[V] = new ReactiveSFlux[V](coreFlux.compose[V](transformer))
+
+ final def concatMapDelayError[V](mapper: T => Publisher[_ <: V], delayUntilEnd: Boolean = false, prefetch: Int = XS_BUFFER_SIZE): SFlux[V] =
+ new ReactiveSFlux[V](coreFlux.concatMapDelayError[V](mapper, delayUntilEnd, prefetch))
+
+ final def concatMapIterable[R](mapper: T => Iterable[_ <: R], prefetch: Int = XS_BUFFER_SIZE): SFlux[R] =
+ new ReactiveSFlux[R](coreFlux.concatMapIterable(new Function[T, JIterable[R]] {
+ override def apply(t: T): JIterable[R] = mapper(t)
+ }, prefetch))
+
+ final def concatWith(other: Publisher[_ <: T]): SFlux[T] = coreFlux.concatWith(other)
+
+ private[publisher] def coreFlux: JFlux[T]
+
+ final def count(): SMono[Long] = new ReactiveSMono[Long](coreFlux.count())
+
+ /**
+ * Provide a default unique value if this sequence is completed without any data
+ *
+ *
+ *
+ * + * @param defaultV the alternate value if this sequence is empty + * @return a new [[SFlux]] + */ + final def defaultIfEmpty(defaultV: T): SFlux[T] = new ReactiveSFlux[T](coreFlux.defaultIfEmpty(defaultV)) + + final def delayElements(delay: Duration, timer: Scheduler = Schedulers.parallel()): SFlux[T] = new ReactiveSFlux[T](coreFlux.delayElements(delay, timer)) + + final def delaySequence(delay: Duration, timer: Scheduler = Schedulers.parallel()): SFlux[T] = new ReactiveSFlux[T](coreFlux.delaySequence(delay, timer)) + + final def delaySubscription(delay: Duration, timer: Scheduler = Schedulers.parallel()): SFlux[T] = new ReactiveSFlux[T](coreFlux.delaySubscription(delay, timer)) + + final def delaySubscription[U](subscriptionDelay: Publisher[U]): SFlux[T] = new ReactiveSFlux[T](coreFlux.delaySubscription(subscriptionDelay)) + + final def dematerialize[X](): Flux[X] = Flux(coreFlux.dematerialize[X]()) + + final def distinct(): SFlux[T] = distinct(identity) + + final def distinct[V](keySelector: T => V): SFlux[T] = new ReactiveSFlux[T](coreFlux.distinct[V](keySelector)) + + final def distinctUntilChanged(): SFlux[T] = distinctUntilChanged(identity) + + final def distinctUntilChanged[V](keySelector: T => V, keyComparator: (V, V) => Boolean = (x: V, y: V) => x == y): SFlux[T] = new ReactiveSFlux[T](coreFlux.distinctUntilChanged[V](keySelector, keyComparator)) + + final def doAfterTerminate(afterTerminate: () => Unit): SFlux[T] = new ReactiveSFlux[T](coreFlux.doAfterTerminate(afterTerminate)) + + final def doOnCancel(onCancel: () => Unit): SFlux[T] = new ReactiveSFlux[T](coreFlux.doOnCancel(onCancel)) + + final def doOnComplete(onComplete: () => Unit): SFlux[T] = new ReactiveSFlux[T](coreFlux.doOnComplete(onComplete)) + + final def doOnEach(signalConsumer: Signal[T] => Unit): SFlux[T] = new ReactiveSFlux[T](coreFlux.doOnEach(signalConsumer)) + + final def doOnError(onError: Throwable => Unit): SFlux[T] = new ReactiveSFlux[T](coreFlux.doOnError(onError)) + + final def doOnNext(onNext: T => Unit): SFlux[T] = new ReactiveSFlux[T](coreFlux.doOnNext(onNext)) + + final def doOnRequest(f: Long => Unit): SFlux[T] = new ReactiveSFlux[T](coreFlux.doOnRequest(f)) + + final def doOnTerminate(onTerminate: () => Unit): SFlux[T] = new ReactiveSFlux[T](coreFlux.doOnTerminate(onTerminate)) + + final def doFinally(onFinally: SignalType => Unit): SFlux[T] = new ReactiveSFlux[T](coreFlux.doFinally(onFinally)) + + final def elapsed(scheduler: Scheduler = Schedulers.parallel()): SFlux[(Long, T)] = new ReactiveSFlux[(Long, T)](coreFlux.elapsed(scheduler).map(new Function[Tuple2[JLong, T], (Long, T)] { + override def apply(t: Tuple2[JLong, T]): (Long, T) = (Long2long(t.getT1), t.getT2) + })) + + final def elementAt(index: Int, defaultValue: Option[T] = None): SMono[T] = new ReactiveSMono[T]( + defaultValue.map((t: T) => coreFlux.elementAt(index, t)) + .getOrElse(coreFlux.elementAt(index))) + + final def expandDeep(expander: T => Publisher[_ <: T], capacity: Int = SMALL_BUFFER_SIZE): SFlux[T] = coreFlux.expandDeep(expander, capacity) + + final def expand(expander: T => Publisher[_ <: T], capacityHint: Int = SMALL_BUFFER_SIZE): SFlux[T] = coreFlux.expand(expander, capacityHint) + + final def filter(p: T => Boolean): SFlux[T] = coreFlux.filter(p) + + final def filterWhen(asyncPredicate: T => _ <: MapablePublisher[Boolean], bufferSize: Int = SMALL_BUFFER_SIZE): SFlux[T] = { + val asyncPredicateFunction = new Function[T, Publisher[JBoolean]] { + override def apply(t: T): Publisher[JBoolean] = 
asyncPredicate(t).map(Boolean2boolean(_)) + } + coreFlux.filterWhen(asyncPredicateFunction, bufferSize) + } + + final def flatMap[R](mapperOnNext: T => Publisher[_ <: R], + mapperOnError: Throwable => Publisher[_ <: R], + mapperOnComplete: () => Publisher[_ <: R]): SFlux[R] = coreFlux.flatMap[R](mapperOnNext, mapperOnError, mapperOnComplete) + + final def flatMapIterable[R](mapper: T => Iterable[_ <: R], prefetch: Int = SMALL_BUFFER_SIZE): SFlux[R] = coreFlux.flatMapIterable(new Function[T, JIterable[R]] { + override def apply(t: T): JIterable[R] = mapper(t) + }, prefetch) + + final def flatMapSequential[R](mapper: T => Publisher[_ <: R], maxConcurrency: Int = SMALL_BUFFER_SIZE, prefetch: Int = XS_BUFFER_SIZE, delayError: Boolean = false): SFlux[R] = + if (!delayError) coreFlux.flatMapSequential[R](mapper, maxConcurrency, prefetch) + else coreFlux.flatMapSequentialDelayError[R](mapper, maxConcurrency, prefetch) + + + final def groupBy[K](keyMapper: T => K): SFlux[GroupedFlux[K, T]] = + groupBy(keyMapper, identity) + + final def groupBy[K, V](keyMapper: T => K, valueMapper: T => V, prefetch: Int = SMALL_BUFFER_SIZE): SFlux[GroupedFlux[K, V]] = { + val jFluxOfGroupedFlux: JFlux[JGroupedFlux[K, V]] = coreFlux.groupBy(keyMapper, valueMapper, prefetch) + new ReactiveSFlux[GroupedFlux[K, V]](jFluxOfGroupedFlux.map((jg: JGroupedFlux[K, V]) => GroupedFlux(jg))) + } + + final def handle[R](handler: (T, SynchronousSink[R]) => Unit): SFlux[R] = coreFlux.handle[R](handler) + + final def hasElement(value: T): SMono[Boolean] = new ReactiveSMono[JBoolean](coreFlux.hasElement(value)).map(Boolean2boolean) + + final def hasElements: SMono[Boolean] = new ReactiveSMono[JBoolean](coreFlux.hasElements()).map(Boolean2boolean) + + final def ignoreElements(): SMono[T] = coreFlux.ignoreElements() + + final def index(): SFlux[(Long, T)] = index[(Long, T)]((x, y) => (x, y)) + + final def index[I](indexMapper: (Long, T) => I): SFlux[I] = new ReactiveSFlux[I](coreFlux.index[I](new BiFunction[JLong, T, I] { + override def apply(t: JLong, u: T) = indexMapper(Long2long(t), u) + })) + + final def last(defaultValue: Option[T] = None): SMono[T] = new ReactiveSMono[T]( + defaultValue map (coreFlux.last(_)) getOrElse coreFlux.last() + ) + + /** + * Observe all Reactive Streams signals and use [[Logger]] support to handle trace implementation. Default will + * use [[Level.INFO]] and java.util.logging. If SLF4J is available, it will be used instead. + *
+ *
+ *
+ * The default log category will be "reactor.*", a generated operator suffix will + * complete, e.g. "reactor.Flux.Map". + * + * @return a new unaltered [[SFlux]] + */ + final def log(category: String = None.orNull[String]): SFlux[T] = coreFlux.log(category) + + override final def map[V](mapper: T => V): SFlux[V] = coreFlux.map[V](mapper) + + final def materialize(): SFlux[Signal[T]] = coreFlux.materialize() + + final def mergeWith(other: Publisher[_ <: T]): SFlux[T] = coreFlux.mergeWith(other) + + final def name(name: String): SFlux[T] = coreFlux.name(name) + + final def next(): SMono[T] = coreFlux.next() + + final def nonEmpty: SMono[Boolean] = hasElements + + final def ofType[U](implicit classTag: ClassTag[U]): SFlux[U] = coreFlux.ofType(classTag.runtimeClass.asInstanceOf[Class[U]]) + + final def onBackpressureBuffer(): SFlux[T] = coreFlux.onBackpressureBuffer() + + final def onBackpressureBuffer(maxSize: Int): SFlux[T] = coreFlux.onBackpressureBuffer(maxSize) + + final def onBackpressureBuffer(maxSize: Int, onOverflow: T => Unit): SFlux[T] = coreFlux.onBackpressureBuffer(maxSize, onOverflow) + + final def onBackpressureBuffer(maxSize: Int, bufferOverflowStrategy: BufferOverflowStrategy): SFlux[T] = coreFlux.onBackpressureBuffer(maxSize, bufferOverflowStrategy) + + final def onBackpressureBuffer(maxSize: Int, onBufferOverflow: T => Unit, bufferOverflowStrategy: BufferOverflowStrategy): SFlux[T] = coreFlux.onBackpressureBuffer(maxSize, onBufferOverflow, bufferOverflowStrategy) + + final def onBackpressureDrop(): SFlux[T] = coreFlux.onBackpressureDrop() + + final def onBackpressureDrop(onDropped: T => Unit): SFlux[T] = coreFlux.onBackpressureDrop(onDropped) + + final def onBackpressureError(): SFlux[T] = coreFlux.onBackpressureError() + + final def onBackpressureLatest(): SFlux[T] = coreFlux.onBackpressureLatest() + + final def onErrorMap(mapper: Throwable => _ <: Throwable): SFlux[T] = coreFlux.onErrorMap(mapper) + + final def onErrorReturn(fallbackValue: T, predicate: Throwable => Boolean = (_: Throwable ) => true): SFlux[T] = coreFlux.onErrorReturn(predicate, fallbackValue) + + final def or(other: Publisher[_ <: T]): SFlux[T] = coreFlux.or(other) + + final def publishNext(): SMono[T] = coreFlux.publishNext() + + final def reduce(aggregator: (T, T) => T): SMono[T] = coreFlux.reduce(aggregator) + + final def reduceWith[A](initial: () => A, accumulator: (A, T) => A): SMono[A] = coreFlux.reduceWith[A](initial, accumulator) + + final def repeat(numRepeat: Long = Long.MaxValue, predicate: () => Boolean = () => true): SFlux[T] = coreFlux.repeat(numRepeat, predicate) + + final def retry(numRetries: Long = Long.MaxValue, retryMatcher: Throwable => Boolean = (_: Throwable) => true): SFlux[T] = coreFlux.retry(numRetries, retryMatcher) + + final def retryWhen(whenFactory: SFlux[Throwable] => Publisher[_]): SFlux[T] = { + val func = new Function[JFlux[Throwable], Publisher[_]] { + override def apply(t: JFlux[Throwable]): Publisher[_] = whenFactory(new ReactiveSFlux[Throwable](t)) + } + coreFlux.retryWhen(func) + } + + final def sample(timespan: Duration): SFlux[T] = coreFlux.sample(timespan) + + final def sampleFirst(timespan: Duration): SFlux[T] = coreFlux.sampleFirst(timespan) + + final def scan(accumulator: (T, T) => T): SFlux[T] = coreFlux.scan(accumulator) + + final def scan[A](initial: A, accumulator: (A, T) => A): SFlux[A] = coreFlux.scan(initial, accumulator) + + final def scanWith[A](initial: () => A, accumulator: (A, T) => A): SFlux[A] = coreFlux.scanWith(initial, accumulator) + + final 
def single(defaultValue: Option[T] = None): SMono[T] = { + defaultValue map { coreFlux.single(_) } getOrElse {coreFlux.single()}: publisher.Mono[T] + } + + final def singleOrEmpty(): SMono[T] = coreFlux.singleOrEmpty() + + final def skip(timespan: Duration, timer: Scheduler = Schedulers.parallel): SFlux[T] = coreFlux.skip(timespan, timer) + + final def skipLast(n: Int): SFlux[T] = coreFlux.skipLast(n) + + final def skipUntil(untilPredicate: T => Boolean): SFlux[T] = coreFlux.skipUntil(untilPredicate) + + final def skipWhile(skipPredicate: T => Boolean): SFlux[T] = coreFlux.skipWhile(skipPredicate) + + final def sort(): SFlux[T] = coreFlux.sort() + + final def sort(sortFunction: Ordering[T]): SFlux[T] = coreFlux.sort(sortFunction) + + final def startWith(iterable: Iterable[_ <: T]): SFlux[T] = coreFlux.startWith(iterable) + + final def startWith(values: T*): SFlux[T] = coreFlux.startWith(values: _*) + + final def startWith(publisher: Publisher[_ <: T]): Flux[T] = coreFlux.startWith(publisher) + + final def subscribe(): Disposable = coreFlux.subscribe() + + /** + * Provide an alternative if this sequence is completed without any data + *
+ *
+ *
+ * + * @param alternate the alternate publisher if this sequence is empty + * @return an alternating [[SFlux]] on source onComplete without elements + */ + final def switchIfEmpty(alternate: Publisher[_ <: T]): SFlux[T] = coreFlux.switchIfEmpty(alternate) + + final def switchMap[V](fn: T => Publisher[_ <: V], prefetch: Int = XS_BUFFER_SIZE): SFlux[V] = coreFlux.switchMap[V](fn, prefetch) + + override def subscribe(s: Subscriber[_ >: T]): Unit = coreFlux.subscribe(s) + + final def tag(key: String, value: String): SFlux[T] = coreFlux.tag(key, value) + + final def take(timespan: Duration, timer: Scheduler = Schedulers.parallel): SFlux[T] = coreFlux.take(timespan, timer) + + final def takeLast(n: Int): SFlux[T] = coreFlux.takeLast(n) + + final def takeUntil(predicate: T => Boolean): SFlux[T] = coreFlux.takeUntil(predicate) + + final def takeWhile(continuePredicate: T => Boolean): SFlux[T] = coreFlux.takeWhile(continuePredicate) + + final def `then`(): SMono[Unit] = new ReactiveSMono(coreFlux.`then`()).map(_ => ()) + + final def thenEmpty(other: MapablePublisher[Unit]): SMono[Unit] = new ReactiveSMono( + coreFlux.thenEmpty(publisherUnit2PublisherVoid(other))).map(_ => ()) + + final def thenMany[V](other: Publisher[V]): SFlux[V] = coreFlux.thenMany[V](other) + + final def timeout(timeout: Duration): SFlux[T] = coreFlux.timeout(timeout) + + final def timeout(timeout: Duration, fallback: Option[Publisher[_ <: T]]): SFlux[T] = coreFlux.timeout(timeout, fallback.orNull) + + final def timeout[U](firstTimeout: Publisher[U]): SFlux[T] = coreFlux.timeout[U](firstTimeout) + + final def timeout[U, V](firstTimeout: Publisher[U], nextTimeoutFactory: T => Publisher[V]): SFlux[T] = coreFlux.timeout(firstTimeout, nextTimeoutFactory) + + final def timeout[U, V](firstTimeout: Publisher[U], nextTimeoutFactory: T => Publisher[V], fallback: Publisher[_ <: T]): SFlux[T] = + coreFlux.timeout(firstTimeout, nextTimeoutFactory, fallback) + + final def toIterable(batchSize: Int = SMALL_BUFFER_SIZE, queueProvider: Option[Supplier[util.Queue[T]]] = None): Iterable[T] = coreFlux.toIterable(batchSize, queueProvider.orNull).asScala + + final def toStream(batchSize: Int = SMALL_BUFFER_SIZE): Stream[T] = coreFlux.toStream.iterator().asScala.toStream + + final def transform[V](transformer: Flux[T] => Publisher[V]): SFlux[V] = coreFlux.transform[V](transformer) + + final def withLatestFrom[U, R](other: Publisher[_ <: U], resultSelector: (T, U) => _ <: R): SFlux[R] = coreFlux.withLatestFrom[U, R](other, resultSelector) + + final def zipWith[T2](source2: Publisher[_ <: T2], prefetch: Int = XS_BUFFER_SIZE): SFlux[(T, T2)] = zipWithCombinator(source2, (t: T, t2: T2) => (t, t2), prefetch) + + final def zipWithCombinator[T2, V](source2: Publisher[_ <: T2], combinator: (T, T2) => V, prefetch: Int = XS_BUFFER_SIZE): SFlux[V] = coreFlux.zipWith[T2, V](source2, prefetch, combinator) + + final def zipWithIterable[T2](iterable: Iterable[_ <: T2]): SFlux[(T, T2)] = zipWithIterable(iterable, (t: T, t2: T2) => (t, t2)) + + final def zipWithIterable[T2, V](iterable: Iterable[_ <: T2], zipper: (T, T2) => _ <: V): SFlux[V] = coreFlux.zipWithIterable[T2, V](iterable, zipper) + +} + +object SFlux { + def apply[T](elements: T*): SFlux[T] = SFlux.fromIterable(elements) + + def combineLatest[T1, T2](p1: Publisher[T1], p2: Publisher[T2]): SFlux[(T1, T2)] = + new ReactiveSFlux[(T1, T2)](JFlux.combineLatest(p1, p2, (t1: T1, t2: T2) => (t1, t2))) + + def combineLatest[T](sources: Publisher[T]*): SFlux[Seq[T]] = + new 
ReactiveSFlux[Seq[T]](JFlux.combineLatest[T, Seq[T]] + (sources, (arr: Array[AnyRef]) => arr.toSeq map (_.asInstanceOf[T]))) + + def combineLatestMap[T1, T2, V](p1: Publisher[T1], p2: Publisher[T2], mapper: (T1, T2) => V): SFlux[V] = + new ReactiveSFlux[V](JFlux.combineLatest(p1, p2, mapper)) + + def combineLatestMap[T: ClassTag, V](mapper: Array[T] => V, sources: Publisher[T]*): SFlux[V] = { + val f = (arr: Array[AnyRef]) => { + val x: Seq[T] = arr.toSeq map (_.asInstanceOf[T]) + mapper(x.toArray) + } + new ReactiveSFlux[V](JFlux.combineLatest(sources, f)) + } + + def concat[T](sources: Publisher[T]*): SFlux[T] = new ReactiveSFlux(JFlux.concat(sources)) + + def concatDelayError[T](sources: Publisher[T]*): SFlux[T] = new ReactiveSFlux[T](JFlux.concatDelayError(sources: _*)) + + def create[T](emitter: FluxSink[T] => Unit, backPressure: FluxSink.OverflowStrategy = OverflowStrategy.BUFFER): SFlux[T] = new ReactiveSFlux[T](JFlux.create(emitter, backPressure)) + + def defer[T](f: => SFlux[T]): SFlux[T] = new ReactiveSFlux[T](JFlux.defer(() => f)) + + def empty[T]: SFlux[T] = new ReactiveSFlux(JFlux.empty[T]()) + + def firstEmitter[I](sources: Publisher[_ <: I]*): SFlux[I] = new ReactiveSFlux[I](JFlux.first[I](sources: _*)) + + def fromArray[T <: AnyRef](array: Array[T]): SFlux[T] = new ReactiveSFlux[T](JFlux.fromArray[T](array)) + + def fromIterable[T](iterable: Iterable[T]): SFlux[T] = new ReactiveSFlux[T](JFlux.fromIterable(iterable.asJava)) + + def fromPublisher[T](source: Publisher[_ <: T]): SFlux[T] = new ReactiveSFlux[T](JFlux.from(source)) + + def fromStream[T](streamSupplier: () => Stream[T]): SFlux[T] = new ReactiveSFlux[T](JFlux.fromStream[T](streamSupplier())) + + def generate[T, S](generator: (S, SynchronousSink[T]) => S, + stateSupplier: Option[Callable[S]] = None, + stateConsumer: Option[S => Unit] = None): SFlux[T] = new ReactiveSFlux[T]( + JFlux.generate[T, S](stateSupplier.orNull[Callable[S]], generator, stateConsumer.orNull[S => Unit]) + ) + + def interval(period: Duration, scheduler: Scheduler = Schedulers.parallel())(implicit delay: Duration = period): SFlux[Long] = + new ReactiveSFlux[Long](JFlux.interval(delay, period).map((l: JLong) => Long2long(l))) + + def just[T](data: T*): SFlux[T] = apply[T](data: _*) + + def mergeSequentialPublisher[T](sources: Publisher[Publisher[T]], delayError: Boolean = false, maxConcurrency: Int = SMALL_BUFFER_SIZE, prefetch: Int = XS_BUFFER_SIZE): SFlux[T] = + new ReactiveSFlux[T]( + if (delayError) JFlux.mergeSequentialDelayError[T](sources, maxConcurrency, prefetch) + else JFlux.mergeSequential[T](sources, maxConcurrency, prefetch) + ) + + def mergeSequential[I](sources: Seq[Publisher[_ <: I]], delayError: Boolean = false, prefetch: Int = XS_BUFFER_SIZE): SFlux[I] = + new ReactiveSFlux[I]( + if (delayError) JFlux.mergeSequentialDelayError(prefetch, sources: _*) + else JFlux.mergeSequential(prefetch, sources: _*) + ) + + def mergeSequentialIterable[I](sources: Iterable[Publisher[_ <: I]], delayError: Boolean = false, maxConcurrency: Int = SMALL_BUFFER_SIZE, prefetch: Int = XS_BUFFER_SIZE) = + new ReactiveSFlux[I]( + if (delayError) JFlux.mergeSequentialDelayError[I](sources, maxConcurrency, prefetch) + else JFlux.mergeSequential[I](sources, maxConcurrency, prefetch)) + + def never[T](): SFlux[T] = new ReactiveSFlux[T](JFlux.never[T]()) + + def push[T](emitter: FluxSink[T] => Unit, backPressure: FluxSink.OverflowStrategy = OverflowStrategy.BUFFER): SFlux[T] = new ReactiveSFlux[T](JFlux.push(emitter, backPressure)) + + def raiseError[T](e: 
Throwable, whenRequested: Boolean = false): SFlux[T] = new ReactiveSFlux[T](JFlux.error(e, whenRequested)) + + def range(start: Int, count: Int): SFlux[Int] = new ReactiveSFlux[Int](JFlux.range(start, count).map((i: java.lang.Integer) => Integer2int(i))) + + def using[T, D](resourceSupplier: () => D, sourceSupplier: D => Publisher[_ <: T], resourceCleanup: D => Unit, eager: Boolean = false): SFlux[T] = + new ReactiveSFlux[T](JFlux.using[T, D](resourceSupplier, sourceSupplier, resourceCleanup, eager)) + + def zip[T1, T2](source1: Publisher[_ <: T1], source2: Publisher[_ <: T2]): SFlux[(T1, T2)] = + new ReactiveSFlux[(T1, T2)](JFlux.zip[T1, T2, (T1, T2)](source1, source2, (t1: T1, t2: T2) => (t1, t2))) + + def zip3[T1, T2, T3](source1: Publisher[_ <: T1], source2: Publisher[_ <: T2], source3: Publisher[_ <: T3]): SFlux[(T1, T2, T3)] = { + new ReactiveSFlux[(T1, T2, T3)](JFlux.zip[T1, T2, T3](source1, source2, source3) + .map[(T1, T2, T3)]((t: Tuple3[T1, T2, T3]) => tupleThree2ScalaTuple3[T1, T2, T3](t))) + } + + def zip4[T1, T2, T3, T4](source1: Publisher[_ <: T1], source2: Publisher[_ <: T2], source3: Publisher[_ <: T3], source4: Publisher[_ <: T4]): SFlux[(T1, T2, T3, T4)] = + new ReactiveSFlux[(T1, T2, T3, T4)](JFlux.zip[T1, T2, T3, T4](source1, source2, source3, source4) + .map[(T1, T2, T3, T4)]((t: Tuple4[T1, T2, T3, T4]) => tupleFour2ScalaTuple4(t))) + + def zip5[T1, T2, T3, T4, T5](source1: Publisher[_ <: T1], source2: Publisher[_ <: T2], source3: Publisher[_ <: T3], source4: Publisher[_ <: T4], source5: Publisher[_ <: T5]): SFlux[(T1, T2, T3, T4, T5)] = + new ReactiveSFlux[(T1, T2, T3, T4, T5)](JFlux.zip[T1, T2, T3, T4, T5](source1, source2, source3, source4, source5) + .map[(T1, T2, T3, T4, T5)]((t: Tuple5[T1, T2, T3, T4, T5]) => tupleFive2ScalaTuple5(t))) + + def zip6[T1, T2, T3, T4, T5, T6](source1: Publisher[_ <: T1], source2: Publisher[_ <: T2], source3: Publisher[_ <: T3], source4: Publisher[_ <: T4], source5: Publisher[_ <: T5], source6: Publisher[_ <: T6]): SFlux[(T1, T2, T3, T4, T5, T6)] = + new ReactiveSFlux[(T1, T2, T3, T4, T5, T6)](JFlux.zip[T1, T2, T3, T4, T5, T6](source1, source2, source3, source4, source5, source6) + .map((t: Tuple6[T1, T2, T3, T4, T5, T6]) => tupleSix2ScalaTuple6(t))) + + def zipMap[T1, T2, O](source1: Publisher[_ <: T1], source2: Publisher[_ <: T2], combinator: (T1, T2) => O): SFlux[O] = + new ReactiveSFlux[O](JFlux.zip[T1, T2, O](source1, source2, combinator)) + + def zipMapIterable[O](sources: Iterable[_ <: Publisher[_]], combinator: Array[_] => O, prefetch: Int = SMALL_BUFFER_SIZE): SFlux[O] = + new ReactiveSFlux[O](JFlux.zip[O](sources, prefetch, combinator)) + + def zipMap[I, O](combinator: Array[AnyRef] => O, sources: Seq[Publisher[_ <: I]], prefetch: Int = XS_BUFFER_SIZE): SFlux[O] = + new ReactiveSFlux[O](JFlux.zip[I, O](combinator, prefetch, sources: _*)) +} + +private[publisher] class ReactiveSFlux[T](publisher: Publisher[T]) extends SFlux[T] with Scannable { + override private[publisher] val coreFlux: JFlux[T] = JFlux.from(publisher) + + override val jScannable: JScannable = JScannable.from(coreFlux) +} diff --git a/src/main/scala/reactor/core/scala/publisher/SFluxLike.scala b/src/main/scala/reactor/core/scala/publisher/SFluxLike.scala new file mode 100644 index 00000000..f6ca6148 --- /dev/null +++ b/src/main/scala/reactor/core/scala/publisher/SFluxLike.scala @@ -0,0 +1,87 @@ +package reactor.core.scala.publisher + +import java.util.concurrent.TimeUnit +import java.util.function.Function + +import org.reactivestreams.{Publisher, 
Subscription} +import reactor.core.publisher.{Flux => JFlux} +import reactor.core.scala.publisher.PimpMyPublisher._ +import reactor.core.scheduler.Schedulers +import reactor.util.concurrent.Queues.XS_BUFFER_SIZE + +import scala.language.higherKinds + +trait SFluxLike[T, Self[U] <: SFluxLike[U, Self]] { + self: Self[T] => + + final def collect[E](containerSupplier: () => E, collector: (E, T) => Unit): SMono[E] = new ReactiveSMono[E](coreFlux.collect(containerSupplier, collector: JBiConsumer[E, T])) + + final def concatMap[V](mapper: T => Publisher[_ <: V], prefetch: Int = XS_BUFFER_SIZE): SFlux[V] = new ReactiveSFlux[V](coreFlux.concatMap[V](mapper, prefetch)) + + private[publisher] def coreFlux: JFlux[T] + + private def defaultToFluxError[U](t: Throwable): SFlux[U] = SFlux.raiseError(t) + + final def doOnSubscribe(onSubscribe: Subscription => Unit): SFlux[T] = new ReactiveSFlux[T](coreFlux.doOnSubscribe(onSubscribe)) + + final def drop(n: Long): SFlux[T] = skip(n) + + final def flatten[S](implicit ev: T <:< SFlux[S]): SFlux[S] = concatMap[S](x => ev(x), XS_BUFFER_SIZE) + + final def foldLeft[R](initial: R)(binaryOps: (R, T) => R): SMono[R] = reduce[R](initial, binaryOps) + + final def head: SMono[T] = take(1).as(SMono.fromPublisher) + + final def max[R >: T](implicit ev: Ordering[R]): SMono[Option[R]] = foldLeft(None: Option[R]) { (acc: Option[R], el: T) => { + acc map (a => ev.max(a, el)) orElse Option(el) + } + } + + final def min[R >: T](implicit ev: Ordering[R]): SMono[Option[R]] = foldLeft(None: Option[R]) { (acc: Option[R], el: T) => { + acc map (a => ev.min(a, el)) orElse Option(el) + } + } + + final def onErrorRecover[U <: T](pf: PartialFunction[Throwable, U]): SFlux[T] = { + def recover(t: Throwable): SFlux[U] = pf.andThen(u => SFlux.just(u)).applyOrElse(t, defaultToFluxError) + + onErrorResume(recover) + } + + final def onErrorRecoverWith[U <: T](pf: PartialFunction[Throwable, SFlux[U]]): SFlux[T] = { + def recover(t: Throwable): SFlux[U] = pf.applyOrElse(t, defaultToFluxError) + onErrorResume(recover) + } + + final def onErrorResume[U <: T](fallback: Throwable => _ <: Publisher[_ <: U]): SFlux[U] = { + val predicate = new Function[Throwable, Publisher[_ <: U]] { + override def apply(t: Throwable): Publisher[_ <: U] = fallback(t) + } + val x: SFlux[T] = coreFlux.onErrorResume(predicate) + x.as[SFlux[U]](t => t.map(u => u.asInstanceOf[U])) + } + + final def reduce[A](initial: A, accumulator: (A, T) => A): SMono[A] = coreFlux.reduce[A](initial, accumulator) + + final def skip(skipped: Long): SFlux[T] = coreFlux.skip(skipped) + + final def sum[R >: T](implicit R: Numeric[R]): SMono[R] = { + import R._ + foldLeft(R.zero) { (acc: R, el: T) => acc + el } + } + + /** + * Alias for [[skip]](1) + * @return + */ + final def tail: SFlux[T] = skip(1) + + final def take(n: Long): SFlux[T] = new ReactiveSFlux[T](coreFlux.take(n)) + + final def zipWithTimeSinceSubscribe(): SFlux[(T, Long)] = { + val scheduler = Schedulers.single() + var subscriptionTime: Long = 0 + doOnSubscribe(_ => subscriptionTime = scheduler.now(TimeUnit.MILLISECONDS)) + .map(t => (t, scheduler.now(TimeUnit.MILLISECONDS) - subscriptionTime)) + } +} diff --git a/src/main/scala/reactor/core/scala/publisher/SMono.scala b/src/main/scala/reactor/core/scala/publisher/SMono.scala new file mode 100644 index 00000000..67aba0b7 --- /dev/null +++ b/src/main/scala/reactor/core/scala/publisher/SMono.scala @@ -0,0 +1,1560 @@ +package reactor.core.scala.publisher + +import java.lang.{Boolean => JBoolean, Long => JLong} +import 
java.util.concurrent.{Callable, CompletableFuture} +import java.util.function.{Consumer, Function} +import java.util.logging.Level + +import org.reactivestreams.{Publisher, Subscriber, Subscription} +import reactor.core.publisher.{MonoSink, Signal, SignalType, SynchronousSink, Flux => JFlux, Mono => JMono} +import reactor.core.scala.Scannable +import reactor.core.scala.publisher.PimpMyPublisher._ +import reactor.core.scheduler.{Scheduler, Schedulers} +import reactor.core.{Disposable, Scannable => JScannable} +import reactor.util.concurrent.Queues.SMALL_BUFFER_SIZE +import reactor.util.context.Context +import reactor.util.function.{Tuple2, Tuple3, Tuple4, Tuple5, Tuple6} + +import scala.collection.JavaConverters._ +import scala.concurrent.duration.Duration +import scala.concurrent.{ExecutionContext, Future, Promise} +import scala.util.{Failure, Success, Try} + +/** + * A Reactive Streams [[Publisher]] with basic rx operators that completes successfully by emitting an element, or + * with an error. + * + *
+ *
+ *
+ * + *
The rx operators will offer aliases for input [[Mono]] type to preserve the "at most one" + * property of the resulting [[Mono]]. For instance [[Mono.flatMap flatMap]] returns a [[Flux]] with + * possibly + * more than 1 emission. Its alternative enforcing [[Mono]] input is [[Mono.`then` then]]. + * + *
`SMono[Unit]` should be used for [[Publisher]] that just completes without any value. + * + *
It is intended to be used in implementations and return types, input parameters should keep using raw + * [[Publisher]] as much as possible. + * + *
Note that using state in the `scala.Function` / lambdas used within Mono operators + * should be avoided, as these may be shared between several [[Subscriber Subscribers]]. + * + * @tparam T the type of the single value of this class + * @see [[SFlux]] + */ +trait SMono[T] extends SMonoLike[T, SMono] with MapablePublisher[T] { + self => + + /** + * Join the termination signals from this mono and another source into the returned + * void mono + * + *
+ *
+ *
+ * + * @param other the [[Publisher]] to wait for + * complete + * @return a new combined [[SMono]] + * @see [[SMono.when]] + */ + final def and(other: Publisher[_]): SMono[Unit] = { + new ReactiveSMono(coreMono.and(other match { + case f: SFlux[_] => f.coreFlux + case m: SMono[_] => m.coreMono + })) map[Unit] (_ => ()) + } + + /** + * Transform this [[SMono]] into a target type. + * + * `mono.as(Flux::from).subscribe()` + * + * @param transformer the { @link Function} applying this { @link Mono} + * @tparam P the returned instance type + * @return the transformed [[SMono]] to instance P + * @see [[SMono.compose]] for a bounded conversion to [[org.reactivestreams.Publisher]] + */ + final def as[P](transformer: SMono[T] => P): P = transformer(this) + + /** + * Get the underlying [[reactor.core.publisher.Mono]] + * + * @return [[reactor.core.publisher.Mono]] + */ + final def asJava(): JMono[T] = coreMono + + /** + * Block until a next signal is received, will return null if onComplete, T if onNext, throw a + * `Exceptions.DownstreamException` if checked error or origin RuntimeException if unchecked. + * If the default timeout `30 seconds` has elapsed,a [[RuntimeException]] will be thrown. + * + * Note that each block() will subscribe a new single (MonoSink) subscriber, in other words, the result might + * miss signal from hot publishers. + * + *
+ *
+ *
+ * + * @param timeout maximum time period to wait for before raising a [[RuntimeException]]. Defaulted to [[Duration.Inf]] + * @return T the result + */ + final def block(timeout: Duration = Duration.Inf): T = + if (timeout == Duration.Inf) coreMono.block() + else coreMono.block(timeout) + + /** + * Subscribe to this [[Mono]] and block until a next signal is + * received, the Mono completes empty or a timeout expires. Returns an [[Option]] + * for the first two cases, which can be used to replace the empty case with an + * Exception via [[Option.orElse(throw exception)]]. + * In case the Mono itself errors, the original exception is thrown (wrapped in a + * [[RuntimeException]] if it was a checked exception). + * If the provided timeout expires, a [[RuntimeException]] is thrown. + * + *
+ *
+ *
+ * Note that each block() will trigger a new subscription: in other words, the result + * might miss signal from hot publishers. + * + * @param timeout maximum time period to wait for before raising a [[RuntimeException]]. Defaulted to [[Duration.Inf]] + * @return T the result + */ + final def blockOption(timeout: Duration = Duration.Inf): Option[T] = + if (timeout == Duration.Inf) coreMono.blockOptional() + else coreMono.blockOptional(timeout) + + /** + * Cast the current [[SMono]] produced type into a target produced type. + * + *
+ *
+ *
+ * @tparam E the [[SMono]] output type
+ * @param clazz the target type to cast to
+ * @return a casted [[SMono]]
+ */
+ final def cast[E](clazz: Class[E]): SMono[E] = coreMono.cast(clazz)
+
+ /**
+ * Turn this [[SMono]] into a hot source and cache last emitted signals for further
+ * [[Subscriber]], with an expiry timeout.
+ *
+ * Completion and Error will also be replayed until `ttl` triggers in which case
+ * the next [[Subscriber]] will start over a new subscription.
+ *
+ *
+ *
+ * @return a replaying [[SMono]]
+ */
+ final def cache(ttl: Duration = Duration.Inf): SMono[T] =
+ if (ttl == Duration.Inf) coreMono.cache()
+ else coreMono.cache(ttl)
+
+ /**
+ * Prepare this [[SMono]] so that subscribers will cancel from it on a
+ * specified
+ * [[reactor.core.scheduler.Scheduler]].
+ *
+ * @param scheduler the [[reactor.core.scheduler.Scheduler]] to signal cancel on
+ * @return a scheduled cancel [[SMono]]
+ */
+ final def cancelOn(scheduler: Scheduler): SMono[T] = coreMono.cancelOn(scheduler)
+
+ /**
+ * Defer the given transformation to this [[SMono]] in order to generate a
+ * target [[SMono]] type. A transformation will occur for each
+ * [[org.reactivestreams.Subscriber]].
+ *
+ * `mono.compose(SMono.fromPublisher).subscribe()`
+ *
+ * @param transformer the function to map this [[SMono]] into a target [[SMono]]
+ * instance.
+ * @tparam V the item type in the returned [[org.reactivestreams.Publisher]]
+ * @return a new [[SMono]]
+ * @see [[SMono.as]] for a loose conversion to an arbitrary type
+ */
+ final def compose[V](transformer: SMono[T] => Publisher[V]): SMono[V] = {
+ val transformerFunction = new Function[JMono[T], Publisher[V]] {
+ override def apply(t: JMono[T]): Publisher[V] = transformer(SMono.this)
+ }
+ coreMono.compose(transformerFunction)
+ }
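An illustrative use of compose, packaging a reusable transformation that is applied for each subscriber (withLogging is a hypothetical helper):

    def withLogging[A](mono: SMono[A]): Publisher[A] = mono.log().asJava()

    val result: Int = SMono.just(42).compose((m: SMono[Int]) => withLogging(m)).block()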
+
+ /**
+ * Concatenate emissions of this [[SMono]] with the provided [[Publisher]]
+ * (no interleave).
+ *
+ *
+ *
+ * @param other the [[Publisher]] sequence to concat after this [[SFlux]]
+ * @return a concatenated [[SFlux]]
+ */
+ final def concatWith(other: Publisher[T]): SFlux[T] = coreMono.concatWith(other)
+
+ final def ++(other: Publisher[T]): SFlux[T] = concatWith(other)
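The ++ alias keeps the familiar Scala collection feel; a short sketch:

    val letters: SFlux[String] = SMono.just("a") ++ SMono.just("b")
    letters.concatWith(SFlux.just("c", "d"))   // "a", "b", "c", "d", no interleaving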
+
+ private[publisher] def coreMono: JMono[T]
+
+ /**
+ * Provide a default unique value if this mono is completed without any data
+ *
+ *
+ *
+ *
+ * + * @param defaultV the alternate value if this sequence is empty + * @return a new [[SMono]] + * @see [[SFlux.defaultIfEmpty]] + */ + final def defaultIfEmpty(defaultV: T): SMono[T] = coreMono.defaultIfEmpty(defaultV) + + /** + * Delay this [[SMono]] element ([[Subscriber.onNext]] signal) by a given + * [[Duration]], on a particular [[Scheduler]]. Empty monos or error signals are not delayed. + * + *
+ *
+ *
+ *
+ * Note that the scheduler on which the mono chain continues execution will be the + * scheduler provided if the mono is valued, or the current scheduler if the mono + * completes empty or errors. + * + * @param delay [[Duration]] by which to delay the [[Subscriber.onNext]] signal + * @param timer a time-capable [[Scheduler]] instance to delay the value signal on + * @return a delayed [[SMono]] + */ + final def delayElement(delay: Duration, timer: Scheduler = Schedulers.parallel()): SMono[T] = coreMono.delayElement(delay) + + /** + * Delay the [[SMono.subscribe subscription]] to this [[SMono]] source until the given + * [[Duration]] elapses. + * + *
+ *
+ *
+ * @param delay [[Duration]] before subscribing this [[SMono]]
+ * @param timer a time-capable [[Scheduler]] instance to run on
+ * @return a delayed [[SMono]]
+ *
+ */
+ final def delaySubscription(delay: Duration, timer: Scheduler = Schedulers.parallel()): SMono[T] = new ReactiveSMono[T](coreMono.delaySubscription(delay, timer))
+
+ /**
+ * Delay the subscription to this [[SMono]] until another [[Publisher]]
+ * signals a value or completes.
+ *
+ *
+ *
+ *
+ * @param subscriptionDelay a
+ * [[Publisher]] to signal by next or complete this [[SMono.subscribe]]
+ * @tparam U the other source type
+ * @return a delayed [[SMono]]
+ *
+ */
+ final def delaySubscription[U](subscriptionDelay: Publisher[U]): SMono[T] = new ReactiveSMono[T](coreMono.delaySubscription(subscriptionDelay))
+
+ /**
+ * Subscribe to this [[SMono]] and another [[Publisher]] that is generated from
+ * this Mono's element and which will be used as a trigger for relaying said element.
+ *
+ * That is to say, the resulting [[SMono]] delays until this Mono's element is
+ * emitted, generates a trigger Publisher and then delays again until the trigger
+ * Publisher terminates.
+ *
+ * Note that contiguous calls to all delayUntil are fused together. The triggers are
+ * generated and subscribed to in sequence, once the previous trigger completes.
+ * Error is propagated immediately downstream. In both cases, an error in the source
+ * is immediately propagated.
+ *
+ *
+ *
+ * @param triggerProvider a [[Function1]] that maps this Mono's value into a
+ * [[Publisher]] whose termination will trigger relaying the value.
+ * @return this [[SMono]], but delayed until the derived publisher terminates.
+ */
+ final def delayUntil(triggerProvider: T => Publisher[_]): SMono[T] = coreMono.delayUntil(triggerProvider)
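For example (sketch; auditLog is a hypothetical trigger standing in for an asynchronous side effect such as a database write):

    def auditLog(user: String): SMono[Unit] = SMono.just(())

    val delayed: SMono[String] = SMono.just("alice").delayUntil(user => auditLog(user))
    // "alice" is only relayed once auditLog's publisher terminates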
+
+ /**
+ * A "phantom-operator" working only if this
+ * [[SMono]] emits an onNext, onError or onComplete [[reactor.core.publisher.Signal]]. The relative [[org.reactivestreams.Subscriber]]
+ * callback will be invoked: an error [[reactor.core.publisher.Signal]] will trigger onError and a complete [[reactor.core.publisher.Signal]] will trigger
+ * onComplete.
+ *
+ *
+ *
+ *
+ * @tparam X the dematerialized type
+ * @return a dematerialized [[SMono]]
+ */
+ final def dematerialize[X](): SMono[X] = coreMono.dematerialize[X]()
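A materialize/dematerialize round trip illustrates the pairing (sketch only):

    import reactor.core.publisher.Signal

    val signals: SMono[Signal[String]] = SMono.just("hello").materialize()
    val roundTrip: SMono[String] = signals.dematerialize[String]()
    // roundTrip emits "hello" again; an error Signal would be turned back into onError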
+
+ /**
+ * Triggered after the [[SMono]] terminates, either by completing downstream successfully or with an error.
+ * The `Try` passed to the callback reflects the outcome: a `Success` when the Mono completed with data, a `Failure` otherwise:
+ *
+ *
+ *
+ * + * @param afterTerminate the callback to call after [[org.reactivestreams.Subscriber.onNext]], [[org.reactivestreams.Subscriber.onComplete]] without preceding [[org.reactivestreams.Subscriber.onNext]] or [[org.reactivestreams.Subscriber.onError]] + * @return a new [[SMono]] + */ + final def doAfterSuccessOrError(afterTerminate: Try[_ <: T] => Unit): SMono[T] = { + val biConsumer = (t: T, u: Throwable) => Option(t) match { + case Some(s) => afterTerminate(Success(s)) + case Some(null) | None => afterTerminate(Failure(u)) + } + coreMono.doAfterSuccessOrError(biConsumer) + } + + /** + * Add behavior (side-effect) triggered after the [[SMono]] terminates, either by + * completing downstream successfully or with an error. + *
+ *
+ *
+ * + * @param afterTerminate the callback to call after [[Subscriber.onComplete]] or [[Subscriber.onError]] + * @return an observed [[SMono]] + */ + final def doAfterTerminate(afterTerminate: () => Unit): SMono[T] = coreMono.doAfterTerminate(afterTerminate) + + /** + * Add behavior triggering after the [[SMono]] terminates for any reason, + * including cancellation. The terminating event [[SignalType.ON_COMPLETE]], + * [[SignalType#ON_ERROR]] and [[SignalType#CANCEL]]) is passed to the consumer, + * which is executed after the signal has been passed downstream. + *
+ * Note that the fact that the signal is propagated downstream before the callback is + * executed means that several doFinally in a row will be executed in + * reverse order. If you want to assert the execution of the callback + * please keep in mind that the Mono will complete before it is executed, so its + * effect might not be visible immediately after eg. a [[SMono.block()]]. + * + * @param onFinally the callback to execute after a terminal signal (complete, error + * or cancel) + * @return an observed [[SMono]] + */ + final def doFinally(onFinally: SignalType => Unit): SMono[T] = coreMono.doFinally(onFinally) + + /** + * Triggered when the [[SMono]] is cancelled. + * + *
+ *
+ *
+ * + * @param onCancel the callback to call on [[org.reactivestreams.Subscriber.cancel]] + * @return a new [[SMono]] + */ + final def doOnCancel(onCancel: () => Unit): SMono[T] = coreMono.doOnCancel(onCancel) + + /** + * Add behavior triggered when the [[SMono]] emits a data successfully. + * + *
+ *
+ *
+ * + * @param onNext the callback to call on [[Subscriber.onNext]] + * @return a new [[SMono]] + */ + final def doOnNext(onNext: T => Unit): SMono[T] = coreMono.doOnNext(onNext) + + /** + * Triggered when the [[SMono]] completes successfully. + * + *
+ *
+ *
+ * + * @param onSuccess the callback to call on, argument is null if the [[SMono]] + * completes without data + * [[org.reactivestreams.Subscriber.onNext]] or [[org.reactivestreams.Subscriber.onComplete]] without preceding [[org.reactivestreams.Subscriber.onNext]] + * @return a new [[SMono]] + */ + final def doOnSuccess(onSuccess: T => Unit): SMono[T] = coreMono.doOnSuccess(onSuccess) + + /** + * Triggered when the [[SMono]] completes with an error. + * + *
+ *
+ *
+ * + * @param onError the error callback to call on [[org.reactivestreams.Subscriber.onError]] + * @return a new [[SMono]] + */ + final def doOnError(onError: Throwable => Unit): SMono[T] = coreMono.doOnError(onError) + + /** + * Attach a `Long consumer` to this [[SMono]] that will observe any request to this [[SMono]]. + * + *
+ *
+ *
+ * @param consumer the consumer to invoke on each request
+ * @return an observed [[SMono]]
+ */
+ final def doOnRequest(consumer: Long => Unit): SMono[T] = coreMono.doOnRequest(consumer)
+
+ /**
+ * Triggered when the [[SMono]] is subscribed.
+ *
+ *
+ *
+ *
+ * + * @param onSubscribe the callback to call on [[Subscriber.onSubscribe]] + * @return a new [[SMono]] + */ + final def doOnSubscribe(onSubscribe: Subscription => Unit): SMono[T] = coreMono.doOnSubscribe(onSubscribe) + + /** + * Add behavior triggered when the [[SMono]] terminates, either by completing successfully or with an error. + * + *
+ *
+ *
+ * + * @param onTerminate the callback to call [[Subscriber.onNext]], [[Subscriber.onComplete]] without preceding [[Subscriber.onNext]] or [[Subscriber.onError]] + * @return a new [[SMono]] + */ + final def doOnTerminate(onTerminate: () => Unit): SMono[T] = coreMono.doOnTerminate(onTerminate) + + /** + * Map this [[Mono]] sequence into [[scala.Tuple2]] of T1 [[Long]] timemillis and T2 + * `T` associated data. The timemillis corresponds to the elapsed time between the subscribe and the first + * next signal. + * + *
+ *
+ *
+ * @param scheduler the [[Scheduler]] to read time from. Defaulted to [[Schedulers.parallel()]]
+ * @return a transforming [[SMono]] that emits a tuple of time elapsed in milliseconds and matching data
+ */
+ final def elapsed(scheduler: Scheduler = Schedulers.parallel()): SMono[(Long, T)] = new ReactiveSMono[(Long, T)](coreMono.elapsed().map((t: Tuple2[JLong, T]) => javaTupleLongAndT2ScalaTupleLongAndT[T](t)))
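A usage sketch, pairing the value with the milliseconds elapsed since subscription:

    import scala.concurrent.duration._

    SMono.just("data")
      .delayElement(50 milliseconds)
      .elapsed()
      .doOnNext { case (millis, value) => println(s"$value arrived after $millis ms") }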
+
+ /**
+ * Recursively expand elements into a graph and emit all the resulting element,
+ * in a depth-first traversal order.
+ *
+ * That is: emit the value from this [[Mono]], expand it and emit the first value + * at this first level of recursion, and so on... When no more recursion is possible, + * backtrack to the previous level and re-apply the strategy. + *
+ * For example, given the hierarchical structure + *
+ * A + * - AA + * - aa1 + * - AB + * - ab1 + * - a1 + *+ * + * Expands `Mono.just(A)` into + *
+ * A + * AA + * aa1 + * AB + * ab1 + * a1 + *+ * + * @param expander the [[Function1]] applied at each level of recursion to expand + * values into a [[Publisher]], producing a graph. + * @param capacityHint a capacity hint to prepare the inner queues to accommodate n + * elements per level of recursion. + * @return this Mono expanded depth-first to a [[SFlux]] + */ + final def expandDeep(expander: T => Publisher[_ <: T], capacityHint: Int = SMALL_BUFFER_SIZE): SFlux[T] = coreMono.expandDeep(expander, capacityHint) + + /** + * Recursively expand elements into a graph and emit all the resulting element using + * a breadth-first traversal strategy. + *
+ * That is: emit the value from this [[Mono]] first, then it each at a first level of + * recursion and emit all of the resulting values, then expand all of these at a + * second level and so on... + *
+ * For example, given the hierarchical structure + *
+ * A + * - AA + * - aa1 + * - AB + * - ab1 + * - a1 + *+ * + * Expands `Mono.just(A)` into + *
+ * A + * AA + * AB + * a1 + * aa1 + * ab1 + *+ * + * @param expander the [[Function1]] applied at each level of recursion to expand + * values into a [[Publisher]], producing a graph. + * @param capacityHint a capacity hint to prepare the inner queues to accommodate n + * elements per level of recursion. + * @return this Mono expanded breadth-first to a [[SFlux]] + */ + final def expand(expander: T => Publisher[_ <: T], capacityHint: Int = SMALL_BUFFER_SIZE): SFlux[T] = coreMono.expand(expander, capacityHint) + + /** + * Test the result if any of this [[SMono]] and replay it if predicate returns true. + * Otherwise complete without value. + * + *
+ *
+ *
+ * + * @param tester the predicate to evaluate + * @return a filtered [[SMono]] + */ + final def filter(tester: T => Boolean): SMono[T] = coreMono.filter(tester) + + /** + * If this [[SMono]] is valued, test the value asynchronously using a generated + * [[Publisher[Boolean]]] test. The value from the Mono is replayed if the + * first item emitted by the test is `true`. It is dropped if the test is + * either empty or its first emitted value is false``. + *
+ * Note that only the first value of the test publisher is considered, and unless it + * is a [[Mono]], test will be cancelled after receiving that first value. + * + * @param asyncPredicate the function generating a [[Publisher]] of [[Boolean]] + * to filter the Mono with + * @return a filtered [[SMono]] + */ + final def filterWhen(asyncPredicate: T => _ <: MapablePublisher[Boolean]): SMono[T] = { + val asyncPredicateFunction = new Function[T, Publisher[JBoolean]] { + override def apply(t: T): Publisher[JBoolean] = asyncPredicate(t).map(Boolean2boolean(_)) + } + coreMono.filterWhen(asyncPredicateFunction) + } + + /** + * Transform the item emitted by this [[SMono]] asynchronously, returning the + * value emitted by another [[SMono]] (possibly changing the value type). + * + *
+ *
+ *
+ * + * @param transformer the function to dynamically bind a new [[SMono]] + * @tparam R the result type bound + * @return a new [[SMono]] with an asynchronously mapped value. + */ + final def flatMap[R](transformer: T => SMono[R]): SMono[R] = coreMono.flatMap[R]((t: T) => transformer(t).coreMono) + + /** + * Transform the item emitted by this [[SMono]] into a Publisher, then forward + * its emissions into the returned [[SFlux]]. + * + *
+ *
+ *
+ * + * @param mapper the + * [[Function1]] to produce a sequence of R from the the eventual passed [[Subscriber.onNext]] + * @tparam R the merged sequence type + * @return a new [[SFlux]] as the sequence is not guaranteed to be single at most + */ + final def flatMapMany[R](mapper: T => Publisher[R]): SFlux[R] = coreMono.flatMapMany(mapper) + + /** + * Transform the signals emitted by this [[SMono]] into a Publisher, then forward + * its emissions into the returned [[SFlux]]. + * + *
+ *
+ *
+ * + * @param mapperOnNext the [[Function1]] to call on next data and returning a sequence to merge + * @param mapperOnError the[[Function1]] to call on error signal and returning a sequence to merge + * @param mapperOnComplete the [[Function1]] to call on complete signal and returning a sequence to merge + * @tparam R the type of the produced inner sequence + * @return a new [[SFlux]] as the sequence is not guaranteed to be single at most + * @see [[SFlux.flatMap]] + */ + final def flatMapMany[R](mapperOnNext: T => Publisher[R], + mapperOnError: Throwable => Publisher[R], + mapperOnComplete: () => Publisher[R]): SFlux[R] = + coreMono.flatMapMany(mapperOnNext, mapperOnError, mapperOnComplete) + + /** + * Transform the item emitted by this [[SMono]] into [[Iterable]], , then forward + * its elements into the returned [[Flux]]. The prefetch argument allows to + * give an + * arbitrary prefetch size to the inner [[Iterable]]. + * + *
+ *
+ *
+ * @param mapper the [[Function1]] to transform input item into a sequence [[Iterable]]
+ * @tparam R the merged output sequence type
+ * @return a merged [[SFlux]]
+ *
+ */
+ final def flatMapIterable[R](mapper: T => Iterable[R]): SFlux[R] = coreMono.flatMapIterable(mapper.andThen(it => it.asJava))
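For instance, fanning a single value out into several elements (illustrative sketch):

    val words: SFlux[String] = SMono.just("hello world")
      .flatMapIterable(sentence => sentence.split(" ").toSeq)
    // emits "hello" then "world"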
+
+ /**
+ * Convert this [[SMono]] to a [[SFlux]]
+ *
+ * @return a [[SFlux]] variant of this [[SMono]]
+ */
+ final def flux(): SFlux[T] = coreMono.flux()
+
+ /**
+ * Emit a single boolean true if this [[SMono]] has an element.
+ *
+ *
+ *
+ *
+ * @return a new [[SMono]] with true if a value is emitted and false
+ * otherwise
+ */
+ final def hasElement: SMono[Boolean] = coreMono.hasElement.map[Boolean](scalaFunction2JavaFunction((jb: JBoolean) => boolean2Boolean(jb.booleanValue())))
+
+ /**
+ * Handle the items emitted by this [[SMono]] by calling a biconsumer with the
+ * output sink for each onNext. At most one [[SynchronousSink.next]]
+ * call must be performed and/or 0 or 1 [[SynchronousSink.error]] or
+ * [[SynchronousSink.complete]].
+ *
+ * @param handler the handling function, invoked with the emitted value and the output sink
+ * @tparam R the transformed type
+ * @return a transformed [[SMono]]
+ */
+ final def handle[R](handler: (T, SynchronousSink[R]) => Unit): SMono[R] = coreMono.handle[R](handler)
+
+ /**
+ * Hides the identity of this [[SMono]] instance.
+ *
+ *
The main purpose of this operator is to prevent certain identity-based + * optimizations from happening, mostly for diagnostic purposes. + * + * @return a new [[SMono]] instance + */ + final def hide(): SMono[T] = coreMono.hide() + + /** + * Ignores onNext signal (dropping it) and only reacts on termination. + * + *
+ *
+ *
+ * + * @return a new completable [[SMono]]. + */ + final def ignoreElement: SMono[T] = coreMono.ignoreElement() + + /** + * Observe Reactive Streams signals matching the passed flags `options` and use + * [[reactor.util.Logger]] support to handle trace implementation. Default will use the passed + * [[Level]] and java.util.logging. If SLF4J is available, it will be used instead. + * + * Options allow fine grained filtering of the traced signal, for instance to only capture onNext and onError: + *
+ * mono.log("category", SignalType.ON_NEXT, SignalType.ON_ERROR)
+ *
+ *
+ *
+ *
+ * @param category to be mapped into logger configuration (e.g. org.springframework
+ * .reactor). If category ends with "." like "reactor.", a generated operator
+ * suffix will complete, e.g. "reactor.Flux.Map".
+ * @param level the [[Level]] to enforce for this tracing Mono (only FINEST, FINE,
+ * INFO, WARNING and SEVERE are taken into account)
+ * @param options a [[Seq]] of [[SignalType]] option to filter log messages
+ * @return a new [[SMono]]
+ *
+ */
+ final def log(category: Option[String] = None, level: Level = Level.INFO, showOperator: Boolean = false, options: Seq[SignalType] = Nil): SMono[T] = coreMono.log(category.orNull, level, showOperator, options: _*)
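A usage sketch restricting the trace to onNext and onError signals (the category name is illustrative):

    import java.util.logging.Level
    import reactor.core.publisher.SignalType

    SMono.just("payload")
      .log(Some("my.category"), Level.FINE, options = Seq(SignalType.ON_NEXT, SignalType.ON_ERROR))
      .block()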
+
+ /**
+ * Transform the item emitted by this [[SMono]] by applying a synchronous function to it.
+ *
+ *
+ *
+ *
+ *
+ * @param mapper the synchronous transforming [[Function1]]
+ * @tparam R the transformed type
+ * @return a new [[SMono]]
+ */
+ final def map[R](mapper: T => R): SMono[R] = coreMono.map[R](mapper)
+
+ /**
+ * Transform incoming onNext, onError and onComplete signals into [[Signal]] instances,
+ * materializing these signals.
+ * Since the error is materialized as a [[Signal]], the propagation will be stopped and onComplete will be
+ * emitted. Complete signal will first emit a [[Signal.complete()]] and then effectively complete the mono.
+ * All these [[Signal]] have a [[reactor.util.context.Context]] associated to them.
+ *
+ *
+ *
+ * @return a [[SMono]] of materialized [[Signal]]
+ * @see [[SMono.dematerialize()]]
+ */
+ final def materialize(): SMono[Signal[T]] = coreMono.materialize()
+
+ /**
+ * Merge emissions of this [[SMono]] with the provided [[Publisher]].
+ *
+ *
+ *
+ *
+ *
+ * @param other the other [[Publisher]] to merge with
+ * @return a new [[SFlux]] as the sequence is not guaranteed to be at most 1
+ */
+ final def mergeWith(other: Publisher[_ <: T]): SFlux[T] = coreMono.mergeWith(other)
+
+ /**
+ * Give a name to this sequence, which can be retrieved using [[Scannable.name()]]
+ * as long as this is the first reachable [[Scannable.parents()]].
+ *
+ * @param name a name for the sequence
+ * @return the same sequence, but bearing a name
+ */
+ final def name(name: String): SMono[T] = coreMono.name(name)
+
+ /**
+ * Evaluate the accepted value against the given [[Class]] type. If the
+ * predicate test succeeds, the value is
+ * passed into the new [[SMono]]. If the predicate test fails, the value is
+ * ignored.
+ *
+ *
+ *
+ *
+ * @param clazz the [[Class]] type to test values against
+ * @return a new [[SMono]] reduced to items converted to the matched type
+ */
+ final def ofType[U](clazz: Class[U]): SMono[U] = coreMono.ofType[U](clazz)
+
+ /**
+ * Transform the error emitted by this [[SMono]] by applying a function.
+ *
+ *
+ *
+ * @param mapper the error transforming [[PartialFunction]]
+ * @return a transformed [[SMono]]
+ */
+ final def onErrorMap(mapper: PartialFunction[Throwable, Throwable]): SMono[T] =
+ coreMono.onErrorMap((t: Throwable) => if (mapper.isDefinedAt(t)) mapper(t) else t)
+
+ private def defaultToMonoError[U](t: Throwable): SMono[U] = SMono.raiseError[U](t)
+
+ final def onErrorRecover[U <: T](pf: PartialFunction[Throwable, U]): SMono[T] = {
+ def recover(t: Throwable): SMono[U] = pf.andThen(u => SMono.just(u)).applyOrElse(t, defaultToMonoError)
+
+ onErrorResume(recover)
+ }
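As a usage sketch (relying only on the `SMono.just` and `SMono.raiseError` factories defined in the companion object below), the partial function recovers selected failures into values while unmatched errors keep propagating:

    import reactor.core.scala.publisher.SMono

    // Only IllegalStateException is recovered; any other error still propagates.
    val recovered: SMono[String] =
      SMono.raiseError[String](new IllegalStateException("boom"))
        .onErrorRecover { case _: IllegalStateException => "fallback" }

    recovered.subscribe((v: String) => println(v)) // prints "fallback"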
+
+ /**
+ * Subscribe to a returned fallback publisher when any error occurs.
+ *
+ *
+ *
+ *
+ *
+ * @param fallback the function to map an alternative [[SMono]]
+ * @return an alternating [[SMono]] on source onError
+ * @see [[SFlux.onErrorResume]]
+ */
+ final def onErrorResume(fallback: Throwable => SMono[_ <: T]): SMono[T] = {
+ val fallbackFunction = new Function[Throwable, JMono[_ <: T]] {
+ override def apply(t: Throwable): JMono[_ <: T] = fallback(t).coreMono
+ }
+ coreMono.onErrorResume(fallbackFunction)
+ }
+
+ /**
+ * Detaches both the child [[Subscriber]] and the [[Subscription]] on
+ * termination or cancellation.
+ *
+ * This should help with odd retention scenarios when running
+ * with non-reactor [[Subscriber]].
+ *
+ * @return a detachable [[SMono]]
+ */
+ final def onTerminateDetach(): SMono[T] = coreMono.onTerminateDetach()
+
+ /**
+ * Emit the first available result from this mono or from the given mono.
+ *
+ *
+ *
+ *
+ *
+ * @param other the racing other [[SMono]] to compete with for the result
+ * @return a new [[SMono]]
+ * @see [[SMono.firstEmitter()]]
+ */
+ final def or(other: SMono[_ <: T]): SMono[T] = coreMono.or(other.coreMono)
+
+ /**
+ * Shares a [[SMono]] for the duration of a function that may transform it and
+ * consume it as many times as necessary without causing multiple subscriptions
+ * to the upstream.
+ *
+ * @param transform the transformation function
+ * @tparam R the output value type
+ * @return a new [[SMono]]
+ */
+ final def publish[R](transform: SMono[T] => SMono[R]): SMono[R] = {
+ val transformFunction = new Function[JMono[T], JMono[R]] {
+ override def apply(t: JMono[T]): JMono[R] = transform(SMono.this).coreMono
+ }
+ coreMono.publish[R](transformFunction)
+ }
+
+ /**
+ * Run onNext, onComplete and onError on a supplied [[Scheduler]]
+ *
+ *
+ *
+ * Typically used for fast publisher, slow consumer(s) scenarios.
+ *
+ * `mono.publishOn(Schedulers.single()).subscribe()`
+ *
+ * @param scheduler a checked [[reactor.core.scheduler.Scheduler.Worker]] factory
+ * @return an asynchronously producing [[SMono]]
+ */
+ //TODO: How to test this?
+ final def publishOn(scheduler: Scheduler): SMono[T] = coreMono.publishOn(scheduler)
+
+ /**
+ * Repeatedly subscribe to the source completion of the previous subscription.
+ *
+ *
+ *
+ *
+ * @param numRepeat the number of times to re-subscribe on onComplete (defaults to an unbounded repeat)
+ * @param predicate the boolean to evaluate on onComplete; repeating stops once it returns false
+ * @return a repeated [[SFlux]] on onComplete
+ */
+ final def repeat(numRepeat: Long = Long.MaxValue, predicate: () => Boolean = () => true): SFlux[T] = coreMono.repeat(numRepeat, predicate)
+
+ /**
+ * Repeatedly subscribe to this [[SMono]] when a companion sequence signals a number of emitted elements in
+ * response to the flux completion signal.
+ *
+ * If the companion sequence signals when this [[SMono]] is active, the repeat
+ * attempt is suppressed and any terminal signal will terminate this [[SFlux]] with
+ * the same signal immediately.
+ *
+ *
+ *
+ *
+ * @param whenFactory the [[Function1]] providing a [[SFlux]] signalling an exclusive number of
+ * emitted elements on onComplete and returning a [[Publisher]] companion.
+ * @return an eventually repeated [[SFlux]] on onComplete when the companion [[Publisher]] produces an
+ * onNext signal
+ *
+ */
+ final def repeatWhen(whenFactory: SFlux[Long] => _ <: Publisher[_]): SFlux[T] = {
+ val when = new Function[JFlux[JLong], Publisher[_]] {
+ override def apply(t: JFlux[JLong]): Publisher[_] = whenFactory(new ReactiveSFlux[Long](t))
+ }
+ coreMono.repeatWhen(when)
+ }
+
+ /**
+ * Repeatedly subscribe to this [[SMono]] until there is an onNext signal when a companion sequence signals a
+ * number of emitted elements.
+ *
+ * If the companion sequence signals when this [[SMono]] is active, the repeat
+ * attempt is suppressed and any terminal signal will terminate this [[SMono]] with the same signal immediately.
+ *
+ *
+ *
+ *
+ * @param repeatFactory the
+ * [[Function1]] providing a [[SFlux]] signalling the current number of repeat on onComplete and returning a [[Publisher]] companion.
+ * @return an eventually repeated [[SMono]] on onComplete when the companion [[Publisher]] produces an
+ * onNext signal
+ *
+ */
+ final def repeatWhenEmpty(repeatFactory: SFlux[Long] => Publisher[_], maxRepeat: Int = Int.MaxValue): SMono[T] = {
+ val when = new Function[JFlux[JLong], Publisher[_]] {
+ override def apply(t: JFlux[JLong]): Publisher[_] = repeatFactory(new ReactiveSFlux[Long](t))
+ }
+ coreMono.repeatWhenEmpty(when)
+ }
+
+ /**
+ * Re-subscribes to this [[SMono]] sequence if it signals any error
+ * either indefinitely or a fixed number of times.
+ *
+ * `numRetries == Long.MaxValue` is treated as infinite retry.
+ *
+ *
+ *
+ *
+ * @param numRetries the number of times to tolerate an error
+ * @param retryMatcher a predicate evaluated against the error to decide whether to re-subscribe
+ * @return a re-subscribing [[SMono]] on onError up to the specified number of retries.
+ *
+ */
+ final def retry(numRetries: Long = Long.MaxValue, retryMatcher: Throwable => Boolean = (_: Throwable) => true): SMono[T] = coreMono.retry(numRetries, retryMatcher)
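A minimal sketch of the `retry` parameters above: bound the number of re-subscriptions and only retry for a specific exception type (the failing IOException source is illustrative):

    import java.io.IOException
    import reactor.core.scala.publisher.SMono

    // Re-subscribe at most 3 times, and only when the failure is an IOException.
    val source: SMono[String] = SMono.raiseError[String](new IOException("transient"))
    val guarded: SMono[String] =
      source.retry(numRetries = 3, retryMatcher = _.isInstanceOf[IOException])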
+
+ /**
+ * Retries this [[SMono]] when a companion sequence signals
+ * an item in response to this [[SMono]] error signal
+ *
+ * If the companion sequence signals when the [[SMono]] is active, the retry
+ * attempt is suppressed and any terminal signal will terminate the [[SMono]] source with the same signal
+ * immediately.
+ *
+ *
+ *
+ *
+ * @param whenFactory the [[Function1]] providing a [[SFlux]] signalling any error from the source sequence and returning a [[Publisher]] companion.
+ * @return a re-subscribing [[SMono]] on onError when the companion [[Publisher]] produces an
+ * onNext signal
+ */
+ final def retryWhen(whenFactory: SFlux[Throwable] => Publisher[_]): SMono[T] = {
+ val when = new Function[JFlux[Throwable], Publisher[_]] {
+ override def apply(t: JFlux[Throwable]): Publisher[_] = whenFactory(new ReactiveSFlux[Throwable](t))
+ }
+ coreMono.retryWhen(when)
+ }
+
+ /**
+ * Expect exactly one item from this [[SMono]] source or signal
+ * [[java.util.NoSuchElementException]] for an empty source.
+ *
+ *
+ *
+ * Note Mono doesn't need [[Flux.single(AnyRef)]], since it is equivalent to
+ * [[SMono.defaultIfEmpty(AnyRef)]] in a [[SMono]].
+ *
+ * @return a [[SMono]] with the single item or an error signal
+ */
+ final def single(): SMono[T] = coreMono.single()
+
+ /**
+ * Subscribe to this [[SMono]] and request unbounded demand.
+ *
+ * This version doesn't specify any consumption behavior for the events from the
+ * chain, especially no error handling, so other variants should usually be preferred.
+ *
+ *
+ *
+ *
+ *
+ * @return a new [[Disposable]] that can be used to cancel the underlying [[Subscription]]
+ */
+ final def subscribe(): Disposable = coreMono.subscribe()
+
+ /**
+ * Subscribe a [[scala.Function1[T,Unit] Consumer]] to this [[SMono]] that will consume all the
+ * sequence.
+ *
+ * For a passive version that observes and forwards incoming data see [[SMono.doOnSuccess]] and
+ * [[SMono.doOnError]].
+ *
+ *
+ *
+ *
+ * @param consumer the consumer to invoke on each value
+ * @return a new [[Disposable]] to dispose the [[Subscription]]
+ */
+ final def subscribe(consumer: T => Unit): Disposable = coreMono.subscribe(consumer)
+
+ /**
+ * Subscribe [[scala.Function1[T,Unit] Consumer]] to this [[SMono]] that will consume all the
+ * sequence.
+ *
+ * For a passive version that observes and forwards incoming data see [[SMono.doOnSuccess]] and
+ * [[SMono.doOnError]].
+ *
+ *
+ *
+ *
+ * @param consumer the consumer to invoke on each next signal
+ * @param errorConsumer the consumer to invoke on error signal
+ * @return a new [[Disposable]] to dispose the [[org.reactivestreams.Subscription]]
+ */
+ final def subscribe(consumer: T => Unit, errorConsumer: Throwable => Unit): Disposable = coreMono.subscribe(consumer, errorConsumer)
+
+ /**
+ * Subscribe `consumer` to this [[SMono]] that will consume all the
+ * sequence.
+ *
+ * For a passive version that observes and forwards incoming data see [[SMono.doOnSuccess]] and
+ * [[SMono.doOnError]].
+ *
+ *
+ *
+ *
+ * @param consumer the consumer to invoke on each value
+ * @param errorConsumer the consumer to invoke on error signal
+ * @param completeConsumer the consumer to invoke on complete signal
+ * @return a new [[Disposable]] to dispose the [[Subscription]]
+ */
+ final def subscribe(consumer: T => Unit, errorConsumer: Throwable => Unit, completeConsumer: => Unit): Disposable = coreMono.subscribe(consumer, errorConsumer, completeConsumer)
+
+ /**
+ * Subscribe `consumer` to this [[SMono]] that will consume all the
+ * sequence.
+ *
+ * For a passive version that observes and forwards incoming data see [[SMono.doOnSuccess]] and
+ * [[SMono.doOnError]].
+ *
+ *
+ *
+ *
+ * @param consumer the consumer to invoke on each value
+ * @param errorConsumer the consumer to invoke on error signal
+ * @param completeConsumer the consumer to invoke on complete signal
+ * @param subscriptionConsumer the consumer to invoke on subscribe signal, to be used
+ * for the initial [[Subscription.request request]], or null for max request
+ * @return a new [[Disposable]] to dispose the [[Subscription]]
+ */
+ final def subscribe(consumer: T => Unit, errorConsumer: Throwable => Unit, completeConsumer: => Unit, subscriptionConsumer: Subscription => Unit): Disposable = coreMono.subscribe(consumer, errorConsumer, completeConsumer, subscriptionConsumer)
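A short sketch of the consumer-based `subscribe` overloads above (the printed messages are illustrative only); note that the complete handler is a by-name block:

    import reactor.core.scala.publisher.SMono

    val disposable = SMono.just(42).subscribe(
      (v: Int) => println(s"value: $v"),       // onNext
      (e: Throwable) => println(s"error: $e"), // onError
      println("done")                          // onComplete (by-name)
    )
    // disposable.dispose() cancels the underlying Subscription if it is still active.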
+
+ override def subscribe(s: Subscriber[_ >: T]): Unit = coreMono.subscribe(s)
+
+ /**
+ * Enrich a potentially empty downstream [[Context]] by adding all values
+ * from the given [[Context]], producing a new [[Context]] that is propagated
+ * upstream.
+ *
+ * The [[Context]] propagation happens once per subscription (not on each onNext):
+ * it is done during the `subscribe(Subscriber)` phase, which runs from
+ * the last operator of a chain towards the first.
+ *
+ * So this operator enriches a [[Context]] coming from under it in the chain
+ * (downstream, by default an empty one) and passes the new enriched [[Context]]
+ * to operators above it in the chain (upstream, by way of them using
+ * [[Flux.subscribe(Subscriber,Context)]]).
+ *
+ * @param mergeContext the [[Context]] to merge with a previous [[Context]]
+ * state, returning a new one.
+ * @return a contextualized [[SMono]]
+ * @see [[Context]]
+ */
+ final def subscriberContext(mergeContext: Context): SMono[T] = coreMono.subscriberContext(mergeContext)
+
+ /**
+ * Enrich a potentially empty downstream [[Context]] by applying a [[Function1]]
+ * to it, producing a new [[Context]] that is propagated upstream.
+ *
+ * The [[Context]] propagation happens once per subscription (not on each onNext):
+ * it is done during the `subscribe(Subscriber)` phase, which runs from
+ * the last operator of a chain towards the first.
+ *
+ * So this operator enriches a [[Context]] coming from under it in the chain
+ * (downstream, by default an empty one) and passes the new enriched [[Context]]
+ * to operators above it in the chain (upstream, by way of them using
+ * `Flux#subscribe(Subscriber,Context)`).
+ *
+ * @param doOnContext the function taking a previous [[Context]] state
+ * and returning a new one.
+ * @return a contextualized [[SMono]]
+ * @see [[Context]]
+ */
+ final def subscriberContext(doOnContext: Context => Context): SMono[T] = coreMono.subscriberContext(doOnContext)
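A small sketch of the function variant (the "requestId" key is arbitrary): the Context is enriched once, during the subscribe phase, and flows upstream:

    import reactor.core.scala.publisher.SMono
    import reactor.util.context.Context

    val withRequestId: SMono[Int] =
      SMono.just(1).subscriberContext((ctx: Context) => ctx.put("requestId", "abc-123"))

Operators above this point in the chain can then read the enriched Context, for instance via `SMono.subscribeContext()` defined in the companion object below.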
+
+ /**
+ * Run the requests to this Publisher [[SMono]] on a given worker assigned by the supplied [[Scheduler]].
+ *
+ * `mono.subscribeOn(Schedulers.parallel()).subscribe())`
+ *
+ *
+ *
+ *
+ *
+ * @param scheduler a checked [[reactor.core.scheduler.Scheduler.Worker]] factory
+ * @return an asynchronously requesting [[SMono]]
+ */
+ final def subscribeOn(scheduler: Scheduler): SMono[T] = coreMono.subscribeOn(scheduler)
+
+ /**
+ * Subscribe the given [[Subscriber]] to this [[SMono]] and return said
+ * [[Subscriber]] (e.g. a [[reactor.core.publisher.MonoProcessor]]).
+ *
+ * @param subscriber the [[Subscriber]] to subscribe with
+ * @tparam E the reified type of the [[Subscriber]] for chaining
+ * @return the passed [[Subscriber]] after subscribing it to this [[SMono]]
+ */
+ final def subscribeWith[E <: Subscriber[_ >: T]](subscriber: E): E = coreMono.subscribeWith(subscriber)
+
+ /**
+ * Provide an alternative [[SMono]] if this mono is completed without data
+ *
+ *
+ *
+ *
+ *
+ * @param alternate the alternate mono if this mono is empty
+ * @return an alternating [[SMono]] on source onComplete without elements
+ * @see [[SFlux.switchIfEmpty]]
+ */
+ final def switchIfEmpty(alternate: SMono[_ <: T]): SMono[T] = coreMono.switchIfEmpty(alternate.coreMono)
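A minimal sketch: the alternate is only subscribed when the source completes without a value:

    import reactor.core.scala.publisher.SMono

    val withFallback: SMono[String] =
      SMono.empty[String].switchIfEmpty(SMono.just("fallback")) // emits "fallback"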
+
+ /**
+ * Tag this mono with a key/value pair. These can be retrieved as a [[Stream]] of
+ * all tags throughout the publisher chain by using [[reactor.core.scala.Scannable.tags()]] (as
+ * traversed
+ * by [[reactor.core.scala.Scannable.parents()]]).
+ *
+ * @param key a tag key
+ * @param value a tag value
+ * @return the same sequence, but bearing tags
+ */
+ final def tag(key: String, value: String): SMono[T] = coreMono.tag(key, value)
+
+ /**
+ * Give this Mono a chance to resolve within a specified time frame but complete if it
+ * doesn't. This works a bit like [[SMono.timeout(Duration)]] except that the resulting
+ * [[SMono]] completes rather than errors when the timer expires.
+ *
+ * The timeframe is evaluated using the provided [[Scheduler]].
+ *
+ * @param duration the maximum duration to wait for the source Mono to resolve.
+ * @param timer the [[Scheduler]] on which to measure the duration.
+ * @return a new [[SMono]] that will propagate the signals from the source unless
+ * no signal is received for `duration`, in which case it completes.
+ */
+ final def take(duration: Duration, timer: Scheduler = Schedulers.parallel()): SMono[T] = coreMono.take(duration, timer)
+
+ /**
+ * Give this Mono a chance to resolve before a companion [[Publisher]] emits. If
+ * the companion emits before any signal from the source, the resulting SMono will
+ * complete. Otherwise, it will relay signals from the source.
+ *
+ * @param other a companion [[Publisher]] that short-circuits the source with an
+ * onComplete signal if it emits before the source emits.
+ * @return a new [[SMono]] that will propagate the signals from the source unless
+ * a signal is first received from the companion [[Publisher]], in which case it
+ * completes.
+ */
+ final def takeUntilOther(other: Publisher[_]): SMono[T] = coreMono.takeUntilOther(other)
+
+ /**
+ * Return an `SMono[Unit]` which only replays complete and error signals
+ * from this [[SMono]].
+ *
+ *
+ *
+ *
+ *
+ * @return a [[SMono]] ignoring its payload (actively dropping)
+ */
+ final def `then`(): SMono[Unit] = new ReactiveSMono[Unit](coreMono.`then`().map((_: Void) => ()))
+
+ /**
+ * Ignore element from this [[SMono]] and transform its completion signal into the
+ * emission and completion signal of a provided `Mono[V]`. Error signal is
+ * replayed in the resulting `SMono[V]`.
+ *
+ *
+ *
+ *
+ * @param other a [[SMono]] to emit from after termination
+ * @tparam V the element type of the supplied Mono
+ * @return a new [[SMono]] that emits from the supplied [[SMono]]
+ */
+ final def `then`[V](other: SMono[V]): SMono[V] = coreMono.`then`(other.coreMono)
+
+ /**
+ * Return a `SMono[Unit]` that waits for this [[SMono]] to complete then
+ * for a supplied [[Publisher Publisher[Unit]]] to also complete. The
+ * second completion signal is replayed, or any error signal that occurs instead.
+ *
+ *
+ *
+ * @param other a [[Publisher]] to wait for after this Mono's termination
+ * @return a new [[SMono]] completing when both publishers have completed in
+ * sequence
+ */
+ final def thenEmpty(other: MapablePublisher[Unit]): SMono[Unit] = new ReactiveSMono[Unit]((coreMono: JMono[T]).thenEmpty(other).map((_: Void) => ()))
+
+ /**
+ * Ignore element from this mono and transform the completion signal into a
+ * `SFlux[V]` that will emit elements from the provided [[Publisher]].
+ *
+ *
+ *
+ *
+ * @param other a [[Publisher]] to emit from after termination
+ * @tparam V the element type of the supplied Publisher
+ * @return a new [[SMono]] that emits from the supplied [[Publisher]] after
+ * this SMono completes.
+ */
+ final def thenMany[V](other: Publisher[V]): SFlux[V] = coreMono.thenMany(other)
+
+ /**
+ * Switch to a fallback [[SMono]] in case an item doesn't arrive before the given period.
+ *
+ *
+ * If the fallback [[SMono]] is empty (`None`), a [[java.util.concurrent.TimeoutException]] is signalled instead.
+ *
+ *
+ *
+ *
+ * @param timeout the timeout before the onNext signal from this [[SMono]]
+ * @param fallback the fallback [[SMono]] to subscribe when a timeout occurs
+ * @param timer a time-capable [[Scheduler]] instance to run on
+ * @return an expirable [[SMono]] with a fallback [[SMono]]
+ */
+ final def timeout(timeout: Duration, fallback: Option[SMono[_ <: T]] = None, timer: Scheduler = Schedulers.parallel()): SMono[T] =
+ coreMono.timeout(timeout, fallback.map(_.coreMono).orNull[JMono[_ <: T]], timer)
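A usage sketch, using the scala.concurrent.duration syntax already used by the tests; the delayed source and the one-second window are illustrative:

    import scala.concurrent.duration._
    import reactor.core.scala.publisher.SMono

    val slow: SMono[String] = SMono.delay(5.seconds).map(_ => "late")
    val guarded: SMono[String] =
      slow.timeout(1.second, fallback = Some(SMono.just("timed out")))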
+
+ /**
+ * Switch to a fallback [[Publisher]] in case the item from this [[SMono]] has
+ * not been emitted before the given [[Publisher]] emits.
+ *
+ *
+ *
+ *
+ * @param firstTimeout the timeout
+ * [[Publisher]] that must not emit before the first signal from this [[SMono]]
+ * @param fallback the fallback [[Publisher]] to subscribe when a timeout occurs
+ * @tparam U the element type of the timeout Publisher
+ * @return an expirable [[SMono]] with a fallback [[SMono]] if the item doesn't
+ * come before a [[Publisher]] signals
+ *
+ */
+ final def timeoutWhen[U](firstTimeout: Publisher[U], fallback: Option[SMono[_ <: T]] = None, timer: Scheduler = Schedulers.parallel()): SMono[T] = {
+ val x = fallback.map((sm: SMono[_ <: T]) => coreMono.timeout[U](firstTimeout, sm.coreMono))
+ .getOrElse(coreMono.timeout[U](firstTimeout))
+ new ReactiveSMono[T](x)
+ }
+
+ /**
+ * Emit a [[Tuple2]] pair of T1 [[Long]] current system time in
+ * millis and T2 `T` associated data for the eventual item from this [[SMono]]
+ *
+ *
+ *
+ *
+ * @param scheduler a [[Scheduler]] instance to read time from
+ * @return a timestamped [[SMono]]
+ */
+ // How to test this?
+ final def timestamp(scheduler: Scheduler = Schedulers.parallel()): SMono[(Long, T)] = new ReactiveSMono[(Long, T)](coreMono.timestamp(scheduler).map((t2: Tuple2[JLong, T]) => (Long2long(t2.getT1), t2.getT2)))
+
+ /**
+ * Transform this [[SMono]] into a [[Future]] completing on onNext or onComplete and failing on
+ * onError.
+ *
+ *
+ *
+ *
+ *
+ * @return a [[Future]]
+ */
+ final def toFuture: Future[T] = {
+ val promise = Promise[T]()
+ coreMono.toFuture.handle[Unit]((value: T, throwable: Throwable) => {
+ Option(value).foreach(v => promise.complete(Try(v)))
+ Option(throwable).foreach(t => promise.failure(t))
+ ()
+ })
+ promise.future
+ }
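A brief sketch of bridging into scala.concurrent; the blocking Await is only for demonstration:

    import scala.concurrent.Await
    import scala.concurrent.duration._
    import reactor.core.scala.publisher.SMono

    val future = SMono.just("hello").toFuture
    Await.result(future, 1.second) // "hello"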
+
+ /**
+ * Transform this [[SMono]] in order to generate a target [[SMono]]. Unlike [[SMono.compose]], the
+ * provided function is executed as part of assembly.
+ *
+ * @example {{{
+ * val applySchedulers = (mono: SMono[Int]) => mono.subscribeOn(Schedulers.elastic()).publishOn(Schedulers.parallel())
+ * mono.transform(applySchedulers).map(v => v * v).subscribe()
+ * }}}
+ * @param transformer the [[Function1]] to immediately map this [[SMono]] into a target [[SMono]]
+ * instance.
+ * @tparam V the item type in the returned [[SMono]]
+ * @return a new [[SMono]]
+ * @see [[SMono.compose]] for deferred composition of [[SMono]] for each [[Subscriber]]
+ * @see [[SMono.as]] for a loose conversion to an arbitrary type
+ */
+ final def transform[V](transformer: SMono[T] => Publisher[V]): SMono[V] = coreMono.transform[V]((_: JMono[T]) => transformer(SMono.this))
+
+}
+
+object SMono {
+
+ /**
+ * An alias of [[SMono.fromPublisher]]
+ * @param source The underlying [[Publisher]]. This can be used to convert [[JMono]] into [[SMono]]
+ * @tparam T a value type parameter of this [[SMono]]
+ * @return [[SMono]]
+ */
+ def apply[T](source: Publisher[_ <: T]): SMono[T] = SMono.fromPublisher[T](source)
+
+ /**
+ * Creates a deferred emitter that can be used with callback-based
+ * APIs to signal at most one value, a complete or an error signal.
+ *
+ * Bridging legacy API involves mostly boilerplate code due to the lack
+ * of standard types and methods. There are two kinds of API surfaces:
+ * 1) addListener/removeListener and 2) callback-handler.
+ *
+ * 1) addListener/removeListener pairs
+ * To work with such API one has to instantiate the listener,
+ * call the sink from the listener then register it with the source:
+ *
+ * SMono.create[String](sink => {
+ *   val listener: HttpListener = event => {
+ *     if (event.getResponseCode >= 400) sink.error(new RuntimeException("Failed"))
+ *     else {
+ *       val body = event.getBody
+ *       if (body.isEmpty) sink.success()
+ *       else sink.success(body.toLowerCase)
+ *     }
+ *   }
+ *
+ *   client.addListener(listener)
+ *
+ *   sink.onDispose(() => client.removeListener(listener))
+ * })
+ *
+ * Note that this works only with single-value emitting listeners. Otherwise,
+ * all subsequent signals are dropped. You may have to call `client.removeListener(listener)`
+ * from within the listener body once a terminal signal has been emitted.
+ *
+ * 2) callback handler
+ * This requires a similar instantiation pattern such as above, but usually the
+ * successful completion and error are separated into different methods.
+ * In addition, the legacy API may or may not support some cancellation mechanism.
+ *
+ * SMono.create[String](sink => {
+ *   val callback = new Callback[String] {
+ *     override def onResult(data: String): Unit = sink.success(data.toLowerCase)
+ *
+ *     override def onError(e: Exception): Unit = sink.error(e)
+ *   }
+ *
+ *   // without cancellation support:
+ *
+ *   client.call("query", callback)
+ *
+ *   // with cancellation support:
+ *
+ *   val cancel: AutoCloseable = client.call("query", callback)
+ *   sink.onDispose(() => {
+ *     try cancel.close()
+ *     catch { case ex: Exception => Exceptions.onErrorDropped(ex) }
+ *   })
+ * })
+ *
+ *
+ * @param callback Consume the [[MonoSink]] provided per-subscriber by Reactor to generate signals.
+ * @tparam T The type of the value emitted
+ * @return a [[SMono]]
+ */
+ def create[T](callback: MonoSink[T] => Unit): SMono[T] = JMono.create[T](callback)
+
+ def defer[T](supplier: () => SMono[T]): SMono[T] = JMono.defer[T](supplier)
+
+ def delay(duration: Duration, timer: Scheduler = Schedulers.parallel()): SMono[Long] = new ReactiveSMono[Long](JMono.delay(duration, timer))
+
+ def empty[T]: SMono[T] = JMono.empty[T]()
+
+ /**
+ * Pick the first result coming from any of the given monos and populate a new `Mono`.
+ *
+ *
+ *
+ *
+ *
+ * @param monos The deferred monos to use.
+ * @tparam T The type of the function result.
+ * @return a [[SMono]].
+ */
+ def firstEmitter[T](monos: SMono[_ <: T]*): SMono[T] = JMono.first[T](monos.map(_.asJava()): _*)
+
+ def fromPublisher[T](source: Publisher[_ <: T]): SMono[T] = JMono.from[T](source)
+
+ def fromCallable[T](supplier: Callable[T]): SMono[T] = JMono.fromCallable[T](supplier)
+
+ def fromDirect[I](source: Publisher[_ <: I]): SMono[I] = JMono.fromDirect[I](source)
+
+ def fromFuture[T](future: Future[T])(implicit executionContext: ExecutionContext): SMono[T] = {
+ val completableFuture = new CompletableFuture[T]()
+ future onComplete {
+ case Success(t) => completableFuture.complete(t)
+ case Failure(error) => completableFuture.completeExceptionally(error)
+ }
+ JMono.fromFuture[T](completableFuture)
+ }
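Going the other direction, an existing Future can be wrapped; the global ExecutionContext here is an arbitrary choice for the sketch:

    import scala.concurrent.Future
    import scala.concurrent.ExecutionContext.Implicits.global
    import reactor.core.scala.publisher.SMono

    val mono: SMono[Int] = SMono.fromFuture(Future(21 * 2))
    mono.subscribe((v: Int) => println(v)) // prints 42 once the Future completes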
+
+ /**
+ * Transform a [[Try]] into an [[SMono]]
+ * @param aTry a Try
+ * @tparam T The type of the [[Try]]
+ * @return an [[SMono]]
+ */
+ def fromTry[T](aTry: => Try[T]): SMono[T] = create[T](sink => {
+ aTry match {
+ case Success(t) => sink.success(t)
+ case Failure(ex) => sink.error(ex)
+ }
+ })
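A minimal sketch: a successful Try becomes a value followed by completion, a failed one becomes an error signal:

    import scala.util.Try
    import reactor.core.scala.publisher.SMono

    val parsed: SMono[Int] = SMono.fromTry(Try("42".toInt))   // emits 42
    val failed: SMono[Int] = SMono.fromTry(Try("oops".toInt)) // signals a NumberFormatException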
+
+ def ignoreElements[T](source: Publisher[T]): SMono[T] = JMono.ignoreElements(source)
+
+ def just[T](data: T): SMono[T] = new ReactiveSMono[T](JMono.just(data))
+
+ def justOrEmpty[T](data: Option[_ <: T]): SMono[T] = JMono.justOrEmpty[T](data)
+
+ def justOrEmpty[T](data: Any): SMono[T] = {
+ data match {
+ case o: Option[T] => JMono.justOrEmpty[T](o)
+ case other: T => JMono.justOrEmpty[T](other)
+ case null => JMono.empty[T]()
+ }
+ }
+
+ def never[T]: SMono[T] = JMono.never[T]()
+
+ def sequenceEqual[T](source1: Publisher[_ <: T], source2: Publisher[_ <: T], isEqual: (T, T) => Boolean = (t1: T, t2: T) => t1 == t2, bufferSize: Int = SMALL_BUFFER_SIZE): SMono[Boolean] =
+ new ReactiveSMono[JBoolean](JMono.sequenceEqual[T](source1, source2, isEqual, bufferSize)).map(Boolean2boolean)
+
+ /**
+ * Create an [[SMono]] emitting the [[Context]] available on subscribe.
+ * If no Context is available, the mono will simply emit the
+ * [[Context.empty() empty Context]].
+ *
+ *
+ *
+ *
+ *
+ * @return a new [[SMono]] emitting the current context
+ * @see [[Mono.subscribe(CoreSubscriber)]]
+ */
+ def subscribeContext(): SMono[Context] = JMono.subscriberContext()
+
+ def raiseError[T](error: Throwable): SMono[T] = JMono.error[T](error)
+
+ /**
+ * Aggregate the given void publishers into a new `SMono` that will be
+ * fulfilled when all of the given `Monos` have been fulfilled. If any Mono terminates without value,
+ * the returned sequence will be terminated immediately and pending results cancelled.
+ *
+ *
+ *
+ *
+ *
+ * @param sources The sources to use.
+ * @return a [[SMono]].
+ */
+ def when(sources: Iterable[_ <: Publisher[Unit] with MapablePublisher[Unit]]): SMono[Unit] = {
+ new ReactiveSMono[Unit](
+ JMono.when(sources.map(s => s.map((_: Unit) => None.orNull: Void)).asJava).map((_: Void) => ())
+ )
+ }
+
+ /**
+ * Aggregate given publishers into a new `Mono` that will be fulfilled
+ * when all of the given `sources` have been fulfilled. An error will cause
+ * pending results to be cancelled and immediate error emission to the returned [[SMono]].
+ *
+ *
+ *
+ *
+ * @param sources The sources to use.
+ * @return a [[SMono]].
+ */
+ def when(sources: Publisher[Unit] with MapablePublisher[Unit]*): SMono[Unit] = new ReactiveSMono[Unit](
+ JMono.when(sources.map(s => s.map((_: Unit) => None.orNull: Void)).asJava).map((_: Void) => ())
+ )
+
+ def zipDelayError[T1, T2](p1: SMono[_ <: T1], p2: SMono[_ <: T2]): SMono[(T1, T2)] = {
+ new ReactiveSMono[(T1, T2)](JMono.zipDelayError[T1, T2](p1.coreMono, p2.coreMono).map((t: Tuple2[T1, T2]) => tupleTwo2ScalaTuple2(t)))
+ }
+
+ def zipDelayError[T1, T2, T3](p1: SMono[_ <: T1], p2: SMono[_ <: T2], p3: SMono[_ <: T3]): SMono[(T1, T2, T3)] = {
+ new ReactiveSMono[(T1, T2, T3)](JMono.zipDelayError[T1, T2, T3](p1.coreMono, p2.coreMono, p3.coreMono).map((t: Tuple3[T1, T2, T3]) => tupleThree2ScalaTuple3(t)))
+ }
+
+ def zipDelayError[T1, T2, T3, T4](p1: SMono[_ <: T1], p2: SMono[_ <: T2], p3: SMono[_ <: T3], p4: SMono[_ <: T4]): SMono[(T1, T2, T3, T4)] = {
+ new ReactiveSMono[(T1, T2, T3, T4)](
+ JMono.zipDelayError[T1, T2, T3, T4](p1.coreMono, p2.coreMono, p3.coreMono, p4.coreMono).map((t: Tuple4[T1, T2, T3, T4]) => tupleFour2ScalaTuple4(t))
+ )
+ }
+
+ def zipDelayError[T1, T2, T3, T4, T5](p1: SMono[_ <: T1], p2: SMono[_ <: T2], p3: SMono[_ <: T3], p4: SMono[_ <: T4], p5: SMono[_ <: T5]): SMono[(T1, T2, T3, T4, T5)] = {
+ new ReactiveSMono[(T1, T2, T3, T4, T5)](
+ JMono.zipDelayError[T1, T2, T3, T4, T5](p1.coreMono, p2.coreMono, p3.coreMono, p4.coreMono, p5.coreMono).map((t: Tuple5[T1, T2, T3, T4, T5]) => tupleFive2ScalaTuple5(t))
+ )
+ }
+
+ def zipDelayError[T1, T2, T3, T4, T5, T6](p1: SMono[_ <: T1], p2: SMono[_ <: T2], p3: SMono[_ <: T3], p4: SMono[_ <: T4], p5: SMono[_ <: T5], p6: SMono[_ <: T6]): SMono[(T1, T2, T3, T4, T5, T6)] = new ReactiveSMono[(T1, T2, T3, T4, T5, T6)](
+ JMono.zipDelayError[T1, T2, T3, T4, T5, T6](p1.coreMono, p2.coreMono, p3.coreMono, p4.coreMono, p5.coreMono, p6.coreMono).map((t: Tuple6[T1, T2, T3, T4, T5, T6]) => tupleSix2ScalaTuple6(t))
+ )
+
+ def whenDelayError(sources: Iterable[_ <: Publisher[_] with MapablePublisher[_]]): SMono[Unit] = new ReactiveSMono[Unit](
+ JMono.whenDelayError(sources.map(s => s.map((_: Any) => None.orNull: Void)).asJava).map((_: Void) => ())
+ )
+
+ def zipDelayError[R](monos: Iterable[_ <: SMono[_]], combinator: Array[AnyRef] => _ <: R): SMono[R] = {
+ new ReactiveSMono[R](JMono.zipDelayError[R](monos.map(_.asJava()).asJava, new Function[Array[AnyRef], R] {
+ override def apply(t: Array[AnyRef]): R = {
+ val v = t.map { v => v: AnyRef }
+ combinator(v)
+ }
+ }))
+ }
+
+ def zipDelayError[R](combinator: Array[Any] => R, monos: SMono[_]*): SMono[R] = {
+ val combinatorFunction = new Function[Array[AnyRef], R] {
+ override def apply(t: Array[AnyRef]): R = {
+ val v = t.map { v => v: Any }
+ combinator(v)
+ }
+ }
+ new ReactiveSMono[R](JMono.zipDelayError[R](combinatorFunction, monos.map(_.asJava()): _*))
+ }
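A small sketch of the two-arity variant defined above: both sources are subscribed, their results are paired, and an error from either side is delayed until both have terminated:

    import reactor.core.scala.publisher.SMono

    val zipped: SMono[(Int, String)] =
      SMono.zipDelayError(SMono.just(1), SMono.just("one")) // emits (1, "one")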
+
+ def zip[R](combinator: Array[AnyRef] => R, monos: SMono[_]*): SMono[R] = new ReactiveSMono[R](JMono.zip(combinator, monos.map(_.asJava()).toArray: _*))
+
+ def zip[R](monos: Iterable[_ <: SMono[_]], combinator: Array[AnyRef] => R): SMono[R] =
+ new ReactiveSMono[R](JMono.zip(monos.map(_.asJava()).asJava, new Function[Array[Object], R] {
+ override def apply(t: Array[Object]) = combinator(t.map { v => Option(v): Option[AnyRef] }.filterNot(_.isEmpty).map(_.getOrElse(None.orNull)))
+ }))
+}
+
+private[publisher] class ReactiveSMono[T](publisher: Publisher[T]) extends SMono[T] with Scannable {
+ override private[publisher] def coreMono: JMono[T] = JMono.from[T](publisher)
+
+ override private[scala] def jScannable: JScannable = JScannable.from(coreMono)
+}
\ No newline at end of file
diff --git a/src/main/scala/reactor/core/scala/publisher/SMonoLike.scala b/src/main/scala/reactor/core/scala/publisher/SMonoLike.scala
new file mode 100644
index 00000000..c85bc66c
--- /dev/null
+++ b/src/main/scala/reactor/core/scala/publisher/SMonoLike.scala
@@ -0,0 +1,8 @@
+package reactor.core.scala.publisher
+
+import scala.language.higherKinds
+
+trait SMonoLike[T, Self[U] <: SMonoLike[U, Self]] {
+ self: Self[T] =>
+
+}
diff --git a/src/main/scala/reactor/core/scala/publisher/ScalaConverters.scala b/src/main/scala/reactor/core/scala/publisher/ScalaConverters.scala
new file mode 100644
index 00000000..4d2bcb6e
--- /dev/null
+++ b/src/main/scala/reactor/core/scala/publisher/ScalaConverters.scala
@@ -0,0 +1,17 @@
+package reactor.core.scala.publisher
+
+import reactor.core.publisher.{Flux => JFlux, Mono => JMono}
+
+import scala.language.implicitConversions
+
+trait ScalaConverters {
+ implicit class PimpJMono[T](jMono: JMono[T]) {
+ def asScala: SMono[T] = new ReactiveSMono[T](jMono)
+ }
+
+ implicit class PimpJFlux[T](jFlux: JFlux[T]) {
+ def asScala: SFlux[T] = new ReactiveSFlux[T](jFlux)
+ }
+}
+
+object ScalaConverters extends ScalaConverters
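A usage sketch of the converters introduced in this file, mirroring the existing `asJava()` direction on the wrappers:

    import reactor.core.publisher.{Flux => JFlux, Mono => JMono}
    import reactor.core.scala.publisher.{SFlux, SMono}
    import reactor.core.scala.publisher.ScalaConverters._

    val sMono: SMono[String] = JMono.just("hello").asScala
    val sFlux: SFlux[Int] = JFlux.just(1, 2, 3).asScala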
diff --git a/src/main/scala/reactor/core/scala/publisher/package.scala b/src/main/scala/reactor/core/scala/publisher/package.scala
index 969d9b66..78bb69b6 100644
--- a/src/main/scala/reactor/core/scala/publisher/package.scala
+++ b/src/main/scala/reactor/core/scala/publisher/package.scala
@@ -1,20 +1,20 @@
package reactor.core.scala
-import java.lang.{Boolean => JBoolean, Iterable => JIterable, Long => JLong}
+import java.lang.{Iterable => JIterable, Long => JLong}
import java.time.{Duration => JDuration}
-import java.util.{Optional, Spliterator, Spliterators}
import java.util.Optional.empty
import java.util.concurrent.Callable
import java.util.function.{BiConsumer, BiFunction, BiPredicate, BooleanSupplier, Consumer, Function, LongConsumer, Predicate, Supplier}
import java.util.stream.{StreamSupport, Stream => JStream}
+import java.util.{Optional, Spliterator, Spliterators}
import org.reactivestreams.Publisher
import reactor.core.publisher.{Flux => JFlux, Mono => JMono}
import reactor.util.function.{Tuple2, Tuple3, Tuple4, Tuple5, Tuple6}
+import scala.collection.JavaConverters._
import scala.concurrent.duration.Duration
import scala.language.implicitConversions
-import scala.collection.JavaConverters._
/**
* Created by winarto on 12/31/16.
@@ -46,16 +46,10 @@ package object publisher {
(javaTuple6.getT1, javaTuple6.getT2, javaTuple6.getT3, javaTuple6.getT4, javaTuple6.getT5, javaTuple6.getT6)
}
-/*
-Uncomment this when used. It is not used for now and reduce the code coverage
- implicit def try2Boolean[T](atry: Try[T]): Boolean = atry match {
- case Success(_) => true
- case Failure(_) => false
- }
-*/
+ implicit def javaTupleLongAndT2ScalaTupleLongAndT[T](tuple2: Tuple2[JLong, T]): (Long, T) = (tuple2.getT1, tuple2.getT2)
- type SConsumer[T] = (T => Unit)
- type SPredicate[T] = (T => Boolean)
+ type SConsumer[T] = T => Unit
+ type SPredicate[T] = T => Boolean
type SBiConsumer[T, U] = (T, U) => Unit
type JBiConsumer[T, U] = BiConsumer[T, U]
@@ -122,19 +116,22 @@ Uncomment this when used. It is not used for now and reduce the code coverage
}
}
- implicit def scalaSupplierMonoR2JavaSupplierJMonoR[R](supplier: () => Mono[R]): Supplier[JMono[R]] = {
+ implicit def scalaSupplierSMonoR2JavaSupplierJMonoR[R](supplier: () => SMono[R]): Supplier[JMono[R]] = {
new Supplier[JMono[R]] {
override def get(): JMono[R] = supplier().asJava()
}
}
- implicit def publisherUnit2PublisherVoid(publisher: Publisher[Unit]): Publisher[Void] = {
- publisher match {
- case m: Mono[Unit] => m.map[Void](_ => null: Void)
- case f: Flux[Unit] => f.map[Void](_ => null: Void)
+ implicit def scalaSupplierMonoR2JavaSupplierJMonoR[R](supplier: () => Mono[R]): Supplier[JMono[R]] = {
+ new Supplier[JMono[R]] {
+ override def get(): JMono[R] = supplier().asJava()
}
}
+ implicit def publisherUnit2PublisherVoid(publisher: MapablePublisher[Unit]): Publisher[Void] = {
+ publisher.map[Void](_ => null: Void)
+ }
+
implicit def scalaBiFunction2JavaBiFunction[T, U, V](biFunction: (T, U) => V): BiFunction[T, U, V] = {
new BiFunction[T, U, V] {
override def apply(t: T, u: U): V = biFunction(t, u)
diff --git a/src/test/scala/reactor/core/scala/publisher/FluxTest.scala b/src/test/scala/reactor/core/scala/publisher/FluxTest.scala
index 3fc159a6..2bf43816 100644
--- a/src/test/scala/reactor/core/scala/publisher/FluxTest.scala
+++ b/src/test/scala/reactor/core/scala/publisher/FluxTest.scala
@@ -49,7 +49,7 @@ class FluxTest extends FreeSpec with Matchers with TableDrivenPropertyChecks wit
.verifyComplete()
}
"with source1, source2 and combinator should produce latest elements into a single element" in {
- val flux = Flux.combineLatest(Mono.just(1), Mono.just("a"), (int: Int, string: String) => s"${int.toString}-$string")
+ val flux = Flux.combineLatest(Mono.just(1), Mono.just("a"), (int: Int, string: String) => s"$int-$string")
StepVerifier.create(flux)
.expectNext("1-a")
.verifyComplete()
@@ -692,7 +692,7 @@ class FluxTest extends FreeSpec with Matchers with TableDrivenPropertyChecks wit
val data = Table(
("scenario", "timespan", "timeshift", "expected"),
("timeshift > timespan", 1500 milliseconds, 2 seconds, Seq(Seq(0l), Seq(1l, 2l), Seq(3l, 4l))),
- ("timeshift < timespan", 1500 milliseconds, 1 second, Seq(Seq(0l), Seq(1l), Seq(2l), Seq(3l), Seq(4l))),
+ ("timeshift < timespan", 1500 milliseconds, 1 second, Seq(Seq(0l), Seq(0l, 1l), Seq(1l, 2l), Seq(2l, 3l), Seq(3l, 4l), Seq(4l))),
("timeshift = timespan", 1500 milliseconds, 1500 milliseconds, Seq(Seq(0l), Seq(1l), Seq(2l, 3l), Seq(4l)))
)
forAll(data) { (scenario, timespan, timeshift, expected) => {
@@ -973,7 +973,6 @@ class FluxTest extends FreeSpec with Matchers with TableDrivenPropertyChecks wit
} finally {
VirtualTimeScheduler.reset()
}
-
}
".delaySequence" - {
@@ -1582,8 +1581,7 @@ class FluxTest extends FreeSpec with Matchers with TableDrivenPropertyChecks wit
}
".ofType should filter the value emitted by this flux according to the class" in {
- val flux = Flux.just(1, "2", "3", 4).ofType(classOf[String])
- StepVerifier.create(flux)
+ StepVerifier.create(Flux.just(1, "2", "3", 4).ofType(classOf[String]))
.expectNext("2", "3")
.verifyComplete()
}
@@ -1727,7 +1725,7 @@ class FluxTest extends FreeSpec with Matchers with TableDrivenPropertyChecks wit
"with numRepeat should repeat as many as the provided parameter" in {
val flux = Flux.just(1, 2, 3).repeat(3)
StepVerifier.create(flux)
- .expectNext(1, 2, 3, 1, 2, 3, 1, 2, 3)
+ .expectNext(1, 2, 3, 1, 2, 3, 1, 2, 3, 1, 2, 3)
.verifyComplete()
}
"with numRepeat and predicate should repeat as many as provided parameter and as long as the predicate returns true" in {
diff --git a/src/test/scala/reactor/core/scala/publisher/MonoTest.scala b/src/test/scala/reactor/core/scala/publisher/MonoTest.scala
index 4305891f..3e5ff63f 100644
--- a/src/test/scala/reactor/core/scala/publisher/MonoTest.scala
+++ b/src/test/scala/reactor/core/scala/publisher/MonoTest.scala
@@ -204,6 +204,13 @@ class MonoTest extends FreeSpec with Matchers with TableDrivenPropertyChecks wit
.expectNext(randomValue)
.verifyComplete()
}
+ "just react on completion signal if it is null" in {
+ val nullData: Any = null
+ val mono = Mono.justOrEmpty(nullData)
+ StepVerifier.create(mono)
+ .expectComplete()
+ .verify()
+ }
}
}
@@ -644,6 +651,8 @@ class MonoTest extends FreeSpec with Matchers with TableDrivenPropertyChecks wit
((_: RuntimeException) => atomicBoolean.compareAndSet(false, true) shouldBe true): SConsumer[RuntimeException])
StepVerifier.create(mono)
.expectError(classOf[RuntimeException])
+ .verify()
+ atomicBoolean shouldBe 'get
}
"with predicate and callback fnction should call the callback function when the predicate returns true" in {
val atomicBoolean = new AtomicBoolean(false)
@@ -659,7 +668,11 @@ class MonoTest extends FreeSpec with Matchers with TableDrivenPropertyChecks wit
".doOnRequest should call the callback function when subscriber request data" in {
val atomicLong = new AtomicLong(0)
val mono = Mono.just(randomValue)
- .doOnRequest(l => atomicLong.compareAndSet(0, l))
+ .doOnRequest{
+ l => {
+ atomicLong.compareAndSet(0, l)
+ }
+ }
mono.subscribe(new BaseSubscriber[Long] {
override def hookOnSubscribe(subscription: Subscription): Unit = {
subscription.request(1)
@@ -814,12 +827,11 @@ class MonoTest extends FreeSpec with Matchers with TableDrivenPropertyChecks wit
}
".handle should handle onNext, onError and onComplete" in {
- val mono = Mono.just(randomValue)
+ StepVerifier.create(Mono.just(randomValue)
.handle((_: Long, s: SynchronousSink[String]) => {
s.next("One")
s.complete()
- })
- StepVerifier.create(mono)
+ }))
.expectNext("One")
.verifyComplete()
}
@@ -1054,7 +1066,7 @@ class MonoTest extends FreeSpec with Matchers with TableDrivenPropertyChecks wit
"with number of repeat should repeat value from this value as many as the provided parameter" in {
val flux = Mono.just(randomValue).repeat(5)
StepVerifier.create(flux)
- .expectNext(randomValue, randomValue, randomValue, randomValue, randomValue)
+ .expectNext(randomValue, randomValue, randomValue, randomValue, randomValue, randomValue)
.verifyComplete()
}
"with number of repeat and predicate should repeat value from this value as many as provided parameter and as" +
@@ -1255,8 +1267,7 @@ class MonoTest extends FreeSpec with Matchers with TableDrivenPropertyChecks wit
}
".asJava should convert to java" in {
- val mono = Mono.just(randomValue).asJava()
- mono shouldBe a[JMono[_]]
+ Mono.just(randomValue).asJava() shouldBe a[JMono[_]]
}
".apply should convert to scala" in {
@@ -1281,5 +1292,4 @@ class MonoAsyncTest extends AsyncFreeSpec {
}
}
}
-
}
\ No newline at end of file
diff --git a/src/test/scala/reactor/core/scala/publisher/SFluxTest.scala b/src/test/scala/reactor/core/scala/publisher/SFluxTest.scala
new file mode 100644
index 00000000..597b74a0
--- /dev/null
+++ b/src/test/scala/reactor/core/scala/publisher/SFluxTest.scala
@@ -0,0 +1,1999 @@
+package reactor.core.scala.publisher
+
+import java.io._
+import java.nio.file.Files
+import java.util
+import java.util.concurrent.Callable
+import java.util.concurrent.atomic.{AtomicBoolean, AtomicInteger, AtomicLong, AtomicReference}
+import java.util.function.{Consumer, Predicate}
+
+import org.mockito.ArgumentMatchers
+import org.mockito.Mockito.{spy, verify}
+import org.reactivestreams.Subscription
+import org.scalatest.prop.TableDrivenPropertyChecks
+import org.scalatest.{FreeSpec, Matchers}
+import reactor.core.publisher.BufferOverflowStrategy.DROP_LATEST
+import reactor.core.publisher.{Flux => JFlux, _}
+import reactor.core.scala.Scannable
+import reactor.core.scheduler.Schedulers
+import reactor.test.StepVerifier
+import reactor.test.scheduler.VirtualTimeScheduler
+import reactor.util.concurrent.Queues
+
+import scala.collection.mutable
+import scala.collection.mutable.ListBuffer
+import scala.concurrent.TimeoutException
+import scala.concurrent.duration.{Duration, _}
+import scala.io.Source
+import scala.language.postfixOps
+import scala.math.Ordering.IntOrdering
+import scala.math.ScalaNumber
+import scala.util.{Failure, Try}
+import ScalaConverters._
+
+class SFluxTest extends FreeSpec with Matchers with TableDrivenPropertyChecks with TestSupport {
+ "SFlux" - {
+ ".apply should return a proper SFlux" in {
+ StepVerifier.create(SFlux(1, 2, 3))
+ .expectNext(1, 2, 3)
+ .verifyComplete()
+ }
+
+ ".asScala should transform Flux into SFlux" in {
+ JFlux.just(1, 2, 3).asScala shouldBe an[SFlux[_]]
+ }
+
+ ".combineLatest" - {
+ "of two should combine two publishers into single SFlux that emit tuple2" in {
+ StepVerifier.create(SFlux.combineLatest(Mono.just(1), Mono.just(2)))
+ .expectNext((1, 2))
+ .verifyComplete()
+ }
+ "of many should combine all of them into single SFlux that emit Seq" in {
+ StepVerifier.create(SFlux.combineLatest(Mono.just(1), Mono.just(2), Mono.just(3), Mono.just(4)))
+ .expectNext(Seq(1, 2, 3, 4))
+ .verifyComplete()
+ }
+ }
+
+ ".combineLatestMap" - {
+ "of two should combine two publishers into single SFlux and apply mapper" in {
+ StepVerifier.create(SFlux.combineLatestMap(Mono.just(1), Mono.just(2), (i: Int, j: Int) => s"$i-$j"))
+ .expectNext("1-2")
+ .verifyComplete()
+ }
+ "of many should combine them into single SFlux and apply mapper" in {
+ val flux = SFlux.combineLatestMap((array: Array[Int]) => s"${array(0)}-${array(1)}-${array(2)}", SFlux(1, 2), SFlux(10, 20), SFlux(100, 200))
+ StepVerifier.create(flux)
+ .expectNext("2-20-100")
+ .expectNext("2-20-200")
+ .verifyComplete()
+ }
+ }
+
+ ".concat" - {
+ "with varargs of publisher should concatenate the underlying publisher" in {
+ val flux = SFlux.concat(Flux.just(1, 2, 3), Mono.just(3), Flux.just(3, 4))
+ StepVerifier.create(flux)
+ .expectNext(1, 2, 3, 3, 3, 4)
+ .verifyComplete()
+ }
+ }
+
+ ".concatDelayError" - {
+ "with varargs of publishers should concatenate all sources emitted from parents" in {
+ val flux = SFlux.concatDelayError[Int](Mono.just(1), Mono.just(2), Mono.just(3))
+ StepVerifier.create(flux)
+ .expectNext(1, 2, 3)
+ .verifyComplete()
+ }
+ }
+
+ ".create should create a flux" in {
+ val flux = SFlux.create[Int]((emitter: FluxSink[Int]) => {
+ emitter.next(1)
+ emitter.complete()
+ })
+ StepVerifier.create(flux)
+ .expectNext(1)
+ .verifyComplete()
+ }
+
+ ".defer should create a flux" in {
+ def f = SFlux(1, 2, 3)
+
+ StepVerifier.create(SFlux.defer(f))
+ .expectNext(1, 2, 3)
+ .verifyComplete()
+ }
+
+ ".empty should return an empty SFlux" in {
+ StepVerifier.create(SFlux.empty)
+ .verifyComplete()
+ }
+
+ ".firstEmitter" - {
+ "with varargs of publisher should create Flux based on the publisher that emit first onNext or onComplete or onError" in {
+ val flux: SFlux[Long] = SFlux.firstEmitter(Mono.delay(Duration("10 seconds")), Mono.just[Long](1L))
+ StepVerifier.create(flux)
+ .expectNext(1)
+ .verifyComplete()
+ }
+ }
+
+ ".fromArray should create a flux that emits the items contained in the provided array" in {
+ StepVerifier.create(SFlux.fromArray(Array("1", "2", "3")))
+ .expectNext("1", "2", "3")
+ .verifyComplete()
+ }
+
+ ".fromIterable should create flux that emit the items contained in the provided iterable" in {
+ StepVerifier.create(SFlux.fromIterable(Iterable(1, 2, 3)))
+ .expectNext(1, 2, 3)
+ .verifyComplete()
+ }
+
+ ".fromPublisher should expose the specified publisher with flux API" in {
+ StepVerifier.create(SFlux.fromPublisher(Mono.just(1)))
+ .expectNext(1)
+ .verifyComplete()
+ }
+
+ ".fromStream" - {
+ "with supplier should create flux that emit items contained in the supplier" in {
+ StepVerifier.create(SFlux.fromStream(() => Stream(1, 2, 3)))
+ .expectNext(1, 2, 3)
+ .verifyComplete()
+ }
+ }
+
+ ".generate" - {
+ "with state supplier and state consumer" in {
+ val tempFile = Files.createTempFile("fluxtest-", ".tmp").toFile
+ tempFile.deleteOnExit()
+ new PrintWriter(tempFile) {
+ write(Range(1, 6).mkString(s"${sys.props("line.separator")}"))
+ flush()
+ close()
+ }
+ val flux = SFlux.generate[Int, BufferedReader](
+ (reader: BufferedReader, sink: SynchronousSink[Int]) => {
+ Option(reader.readLine()).filterNot(_.isEmpty).map(_.toInt) match {
+ case Some(x) => sink.next(x)
+ case None => sink.complete()
+ }
+ reader
+ }, Option((() => new BufferedReader(new InputStreamReader(new FileInputStream(tempFile)))): Callable[BufferedReader]),
+ Option((bufferedReader: BufferedReader) => bufferedReader.close())
+ )
+ StepVerifier.create(flux)
+ .expectNext(1, 2, 3, 4, 5)
+ .verifyComplete()
+ }
+ }
+
+ ".index" - {
+ "should return tuple with the index" in {
+ val flux = SFlux("a", "b", "c").index()
+ StepVerifier.create(flux)
+ .expectNext((0l, "a"), (1l, "b"), (2l, "c"))
+ .verifyComplete()
+ }
+ "with index mapper should return the mapped value" in {
+ val flux = SFlux("a", "b", "c").index((i, v) => s"$i-$v")
+ StepVerifier.create(flux)
+ .expectNext("0-a", "1-b", "2-c")
+ .verifyComplete()
+ }
+ }
+
+ ".interval" - {
+ "without delay should produce flux of Long starting from 0 every provided timespan immediately" in {
+ StepVerifier.withVirtualTime(() => SFlux.interval(1 second).take(5))
+ .thenAwait(5 seconds)
+ .expectNext(0, 1, 2, 3, 4)
+ .verifyComplete()
+ }
+ "with delay should produce flux of Long starting from 0 every provided timespan after provided delay" in {
+ StepVerifier.withVirtualTime(() => SFlux.interval(1 second)(2 seconds).take(5))
+ .thenAwait(11 seconds)
+ .expectNext(0, 1, 2, 3, 4)
+ .verifyComplete()
+ }
+ "with Scheduler should use the provided timed scheduler" in {
+ StepVerifier.withVirtualTime(() => SFlux.interval(1 second, Schedulers.single()).take(5))
+ .thenAwait(5 seconds)
+ .expectNext(0, 1, 2, 3, 4)
+ .verifyComplete()
+ }
+ "with delay and Scheduler should use the provided time scheduler after delay" in {
+ StepVerifier.withVirtualTime(() => SFlux.interval(2 seconds, Schedulers.single())(1 second).take(5))
+ .thenAwait(11 seconds)
+ .expectNext(0, 1, 2, 3, 4)
+ .verifyComplete()
+ }
+ }
+
+ ".just" - {
+ "with varargs should emit values from provided data" in {
+ val flux = SFlux.just(1, 2)
+ StepVerifier.create(flux)
+ .expectNext(1, 2)
+ .verifyComplete()
+ }
+ "with one element should emit value from provided data" in {
+ val flux = Flux.just[Int](1)
+ StepVerifier.create(flux)
+ .expectNext(1)
+ .verifyComplete()
+ }
+ }
+
+ ".mergeSequential" - {
+ "with publisher of publisher should merge the underlying publisher in sequence of publisher" in {
+ StepVerifier.create(SFlux.mergeSequentialPublisher[Int](SFlux(SFlux(1, 2, 3, 4), SFlux(2, 3, 4))))
+ .expectNext(1, 2, 3, 4, 2, 3, 4)
+ .verifyComplete()
+ }
+ "with publisher of publisher, maxConcurrency and prefetch should merge the underlying publisher in sequence of publisher" in {
+ StepVerifier.create(SFlux.mergeSequentialPublisher[Int](SFlux(SFlux(1, 2, 3), SFlux(2, 3, 4)), maxConcurrency = 8, prefetch = 2))
+ .expectNext(1, 2, 3, 2, 3, 4)
+ .verifyComplete()
+ }
+ "with publisher of publisher, delayError, maxConcurrency and prefetch should merge the underlying publisher in sequence of publisher" in {
+ StepVerifier.create(SFlux.mergeSequentialPublisher[Int](SFlux(SFlux(1, 2, 3), SFlux(2, 3, 4)), delayError = true, 8, 2))
+ .expectNext(1, 2, 3, 2, 3, 4)
+ .verifyComplete()
+ }
+ "with varargs of publishers should merge the underlying publisher in sequence of publisher" in {
+ StepVerifier.create[Int](SFlux.mergeSequential[Int](Seq(SFlux(1, 2, 3), SFlux(2, 3, 4))))
+ .expectNext(1, 2, 3, 2, 3, 4)
+ .verifyComplete()
+ }
+ "with prefetch and varargs of publishers should merge the underlying publisher in sequence of publisher" in {
+ StepVerifier.create(SFlux.mergeSequential[Int](Seq(SFlux(1, 2, 3), SFlux(2, 3, 4)), prefetch = 2))
+ .expectNext(1, 2, 3, 2, 3, 4)
+ .verifyComplete()
+ }
+ "with prefetch, delayError and varargs of publishers should merge the underlying publisher in sequence of publisher" in {
+ StepVerifier.create[Int](SFlux.mergeSequential[Int](Seq(SFlux(1, 2, 3), SFlux(2, 3, 4)), delayError = true, 2))
+ .expectNext(1, 2, 3, 2, 3, 4)
+ .verifyComplete()
+ }
+ "with iterable of publisher should merge the underlying publisher in sequence of the publisher" in {
+ StepVerifier.create(SFlux.mergeSequentialIterable[Int](Iterable(SFlux(1, 2, 3), SFlux(2, 3, 4))))
+ .expectNext(1, 2, 3, 2, 3, 4)
+ .verifyComplete()
+ }
+ "with iterable of publisher, maxConcurrency and prefetch should merge the underlying publisher in sequence of the publisher" in {
+ StepVerifier.create(SFlux.mergeSequentialIterable[Int](Iterable(SFlux(1, 2, 3), SFlux(2, 3, 4)), maxConcurrency = 8, prefetch = 2))
+ .expectNext(1, 2, 3, 2, 3, 4)
+ .verifyComplete()
+ }
+ "with iterable of publisher, delayError, maxConcurrency and prefetch should merge the underlying publisher in sequence of the publisher" in {
+ val flux = SFlux.mergeSequentialIterable[Int](Iterable(SFlux(1, 2, 3), SFlux(2, 3, 4)), delayError = true, 8, 2)
+ StepVerifier.create(flux)
+ .expectNext(1, 2, 3, 2, 3, 4)
+ .verifyComplete()
+ }
+ }
+
+ ".never should never emit any signal" in {
+ StepVerifier.create(SFlux.never())
+ .expectSubscription()
+ .expectNoEvent(Duration(1, "second"))
+ }
+
+ ".push should create a flux" in {
+ StepVerifier.create(SFlux.push[Int]((emitter: FluxSink[Int]) => {
+ emitter.next(1)
+ emitter.next(2)
+ emitter.complete()
+ }))
+ .expectNext(1, 2)
+ .verifyComplete()
+ }
+
+ ".raiseError" - {
+ "with throwable and whenRequest flag should" - {
+ "emit onError during onSubscribe if the flag is false" in {
+ val flag = new AtomicBoolean(false)
+ val flux = SFlux.raiseError(new RuntimeException())
+ .doOnRequest(_ => flag.compareAndSet(false, true))
+ Try(flux.subscribe(new BaseSubscriber[Long] {
+ override def hookOnSubscribe(subscription: Subscription): Unit = {
+ ()
+ }
+
+ override def hookOnNext(value: Long): Unit = ()
+ })) shouldBe a[Failure[_]]
+ flag.get() shouldBe false
+ }
+ "emit onError during onRequest if the flag is true" in {
+ val flag = new AtomicBoolean(false)
+ val flux = SFlux.raiseError(new RuntimeException(), whenRequested = true)
+ .doOnRequest(_ => flag.compareAndSet(false, true))
+ Try(flux.subscribe(new BaseSubscriber[Long] {
+ override def hookOnSubscribe(subscription: Subscription): Unit = {
+ subscription.request(1)
+ ()
+ }
+
+ override def hookOnNext(value: Long): Unit = ()
+ })) shouldBe a[Failure[_]]
+ flag.get() shouldBe true
+ }
+ }
+ }
+
+ ".range should emit int within the range" in {
+ StepVerifier.create(SFlux.range(10, 5))
+ .expectNext(10, 11, 12, 13, 14)
+ .verifyComplete()
+ }
+
+ ".using" - {
+ "without eager flag should produce some data" in {
+ val tempFile = Files.createTempFile("fluxtest-", ".tmp")
+ tempFile.toFile.deleteOnExit()
+ new PrintWriter(tempFile.toFile) {
+ write(s"1${sys.props("line.separator")}2")
+ flush()
+ close()
+ }
+ StepVerifier.create(
+ SFlux.using[String, File](() => tempFile.toFile, (file: File) => SFlux.fromIterable[String](Source.fromFile(file).getLines().toIterable), (file: File) => {
+ file.delete()
+ ()
+ }))
+ .expectNext("1", "2")
+ .verifyComplete()
+ }
+ "with eager flag should produce some data" in {
+ val tempFile = Files.createTempFile("fluxtest-", ".tmp")
+ tempFile.toFile.deleteOnExit()
+ new PrintWriter(tempFile.toFile) {
+ write(s"1${sys.props("line.separator")}2")
+ flush()
+ close()
+ }
+ StepVerifier.create(
+ SFlux.using[String, File](() => tempFile.toFile, (file: File) => SFlux.fromIterable[String](Source.fromFile(file).getLines().toIterable), (file: File) => {
+ file.delete()
+ ()
+ }, eager = true))
+ .expectNext("1", "2")
+ .verifyComplete()
+ }
+ }
+
+ ".zip" - {
+ "with source1, source2 and combinator should combine the data" in {
+ val flux = SFlux.zipMap(SFlux.just(1, 2, 3), SFlux.just("one", "two", "three"), (i: Int, str: String) => s"$i-$str")
+ StepVerifier.create(flux)
+ .expectNext("1-one", "2-two", "3-three")
+ .verifyComplete()
+ }
+ "with source1 and source2 should emit flux with tuple2" in {
+ StepVerifier.create(SFlux.zip(SFlux.just(1, 2, 3), SFlux.just("one", "two", "three")))
+ .expectNext((1, "one"), (2, "two"), (3, "three"))
+ .verifyComplete()
+ }
+ "with source1, source2, source3 should emit flux with tuple3" in {
+ StepVerifier.create(SFlux.zip3(SFlux.just(1, 2, 3), SFlux.just("one", "two", "three"), SFlux.just(1l, 2l, 3l)))
+ .expectNext((1, "one", 1l), (2, "two", 2l), (3, "three", 3l))
+ .verifyComplete()
+ }
+ "with source1, source2, source3, source4 should emit flux with tuple4" in {
+ StepVerifier.create(SFlux.zip4(SFlux.just(1, 2, 3), SFlux.just("one", "two", "three"), SFlux.just(1l, 2l, 3l), SFlux.just(BigDecimal("1"), BigDecimal("2"), BigDecimal("3"))))
+ .expectNext((1, "one", 1l, BigDecimal("1")), (2, "two", 2l, BigDecimal("2")), (3, "three", 3l, BigDecimal("3")))
+ .verifyComplete()
+ }
+ "with source1, source2, source3, source4, source5 should emit flux with tuple5" in {
+ StepVerifier.create(SFlux.zip5(SFlux.just(1, 2, 3), SFlux.just("one", "two", "three"), SFlux.just(1l, 2l, 3l), SFlux.just(BigDecimal("1"), BigDecimal("2"), BigDecimal("3")), SFlux.just("a", "i", "u")))
+ .expectNext((1, "one", 1l, BigDecimal("1"), "a"), (2, "two", 2l, BigDecimal("2"), "i"), (3, "three", 3l, BigDecimal("3"), "u"))
+ .verifyComplete()
+ }
+ "with source1, source2, source3, source4, source5, source6 should emit flux with tuple6" in {
+ StepVerifier.create(SFlux.zip6(SFlux.just(1, 2, 3), SFlux.just("one", "two", "three"), SFlux.just(1l, 2l, 3l), SFlux.just(BigDecimal("1"), BigDecimal("2"), BigDecimal("3")), SFlux.just("a", "i", "u"), SFlux.just("a", "b", "c")))
+ .expectNext((1, "one", 1l, BigDecimal("1"), "a", "a"), (2, "two", 2l, BigDecimal("2"), "i", "b"), (3, "three", 3l, BigDecimal("3"), "u", "c"))
+ .verifyComplete()
+ }
+ "with iterable and combinator should emit flux of combined data" in {
+ StepVerifier.create(SFlux.zipMapIterable[String](Iterable(SFlux.just(1, 2, 3), SFlux.just("one", "two", "three")), (array: Array[_]) => s"${array(0)}-${array(1)}"))
+ .expectNext("1-one", "2-two", "3-three")
+ .verifyComplete()
+ }
+ "with iterable, prefetch and combinator should emit flux of combined data" in {
+ StepVerifier.create(SFlux.zipMapIterable[String](Iterable(SFlux.just(1, 2, 3), SFlux.just("one", "two", "three")), (array: Array[_]) => s"${array(0)}-${array(1)}", 2))
+ .expectNext("1-one", "2-two", "3-three")
+ .verifyComplete()
+ }
+ "with combinator and varargs publisher should emit flux of combined data" in {
+ StepVerifier.create(SFlux.zipMap((array: Array[AnyRef]) => s"${array(0)}-${array(1)}", Seq(SFlux.just(1, 2, 3), SFlux.just(10, 20, 30))))
+ .expectNext("1-10", "2-20", "3-30")
+ .verifyComplete()
+ }
+ "with combinator, prefetch and varargs publisher should emit flux of combined data" in {
+ StepVerifier.create(SFlux.zipMap((array: Array[AnyRef]) => s"${array(0)}-${array(1)}", Seq(SFlux.just(1, 2, 3), SFlux.just(10, 20, 30)), 2))
+ .expectNext("1-10", "2-20", "3-30")
+ .verifyComplete()
+ }
+ }
+
+ ".all should check every single element satisfy the predicate" in {
+ StepVerifier.create(SFlux.just(1, 2, 3).all(i => i > 0))
+ .expectNext(true)
+ .verifyComplete()
+ }
+
+ ".any should check that there is at least one element satisfy the predicate" in {
+ StepVerifier.create(SFlux.just(1, 2, 3).any(i => i % 2 == 0))
+ .expectNext(true)
+ .verifyComplete()
+ }
+
+ ".as should transform this flux to another publisher" in {
+ StepVerifier.create(SFlux.just(1, 2, 3).as(SMono.fromPublisher))
+ .expectNext(1)
+ .verifyComplete()
+ }
+
+ ".blockFirst" - {
+ "should block and return the first element" in {
+ val element = SFlux.just(1, 2, 3).blockFirst()
+ element shouldBe Option(1)
+ }
+ "with duration should wait up to maximum provided duration" in {
+ val element = SFlux.just(1, 2, 3).blockFirst(Duration(10, "seconds"))
+ element shouldBe Option(1)
+ }
+ }
+
+ ".blockLast" - {
+ "should block and return the last element" in {
+ val element = SFlux.just(1, 2, 3).blockLast()
+ element shouldBe Option(3)
+ }
+ "with duration should wait up to the maximum provided duration to get the last element" in {
+ val element = SFlux.just(1, 2, 3).blockLast(10 seconds)
+ element shouldBe Option(3)
+ }
+ }
+
+ ".buffer" - {
+ "should buffer all element into a Seq" in {
+ StepVerifier.create(SFlux.just(1, 2, 3).buffer())
+ .expectNext(Seq(1, 2, 3))
+ .verifyComplete()
+ }
+ "with maxSize should buffer element into a batch of Seqs" in {
+ StepVerifier.create(SFlux.just(1, 2, 3).buffer(2))
+ .expectNext(Seq(1, 2), Seq(3))
+ .verifyComplete()
+ }
+ "with maxSize and sequence supplier should buffer element into a batch of sequences provided by supplier" in {
+ val seqSet = mutable.Set[mutable.ListBuffer[Int]]()
+ val flux = SFlux.just(1, 2, 3).buffer(2, () => {
+ val seq = mutable.ListBuffer[Int]()
+ seqSet += seq
+ seq
+ })
+ StepVerifier.create(flux)
+ .expectNextMatches((seq: Seq[Int]) => {
+ seq shouldBe Seq(1, 2)
+ seqSet should contain(seq)
+ true
+ })
+ .expectNextMatches((seq: Seq[Int]) => {
+ seq shouldBe Seq(3)
+ seqSet should contain(seq)
+ true
+ })
+ .verifyComplete()
+ }
+ "with maxSize and skip" - {
+ val originalFlux = Flux.just(1, 2, 3, 4, 5)
+ val data = Table(
+ ("scenario", "maxSize", "skip", "expectedSequence"),
+ ("maxSize < skip", 2, 3, Iterable(ListBuffer(1, 2), ListBuffer(4, 5))),
+ ("maxSize > skip", 3, 2, Iterable(ListBuffer(1, 2, 3), ListBuffer(3, 4, 5), ListBuffer(5))),
+ ("maxSize = skip", 2, 2, Iterable(ListBuffer(1, 2), ListBuffer(3, 4), ListBuffer(5)))
+ )
+ forAll(data) { (scenario, maxSize, skip, expectedSequence) => {
+ s"when $scenario" in {
+ val flux = originalFlux.buffer(maxSize, skip)
+ StepVerifier.create(flux)
+ .expectNextSequence(expectedSequence)
+ .verifyComplete()
+ }
+ }
+ }
+ }
+ "with maxSize, skip and buffer supplier" - {
+ val data = Table(
+ ("scenario", "maxSize", "skip", "expectedSequence"),
+ ("maxSize < skip", 1, 2, Iterable(ListBuffer(1), ListBuffer(3), ListBuffer(5))),
+ ("maxSize > skip", 3, 2, Iterable(ListBuffer(1, 2, 3), ListBuffer(3, 4, 5), ListBuffer(5))),
+ ("maxSize = skip", 2, 2, Iterable(ListBuffer(1, 2), ListBuffer(3, 4), ListBuffer(5)))
+ )
+ forAll(data) { (scenario, maxSize, skip, expectedSequence) => {
+ val iterator = expectedSequence.iterator
+ s"when $scenario" in {
+ val originalFlux = Flux.just(1, 2, 3, 4, 5)
+ val seqSet = mutable.Set[mutable.ListBuffer[Int]]()
+ val flux = originalFlux.buffer(maxSize, skip, () => {
+ val seq = mutable.ListBuffer[Int]()
+ seqSet += seq
+ seq
+ })
+ StepVerifier.create(flux)
+ .expectNextMatches((seq: Seq[Int]) => {
+ seq shouldBe iterator.next()
+ true
+ })
+ .expectNextMatches((seq: Seq[Int]) => {
+ seq shouldBe iterator.next()
+ true
+ })
+ .expectNextMatches((seq: Seq[Int]) => {
+ seq shouldBe iterator.next()
+ true
+ })
+ .verifyComplete()
+ iterator.hasNext shouldBe false
+ }
+ }
+ }
+ }
+
+ "with timespan should split values every timespan" in {
+ StepVerifier.withVirtualTime(() => SFlux.interval(1 second).take(5).bufferTimeSpan(1500 milliseconds)())
+ .thenAwait(5 seconds)
+ .expectNext(Seq(0L), Seq(1L), Seq(2L, 3L), Seq(4L))
+ .verifyComplete()
+ }
+
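+ // bufferTimeSpan(timespan)(timeshift): a new buffer opens every timeshift and collects values for timespan,
+ // so timeshift < timespan yields overlapping buffers and timeshift > timespan drops the values in between.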
+ val data = Table(
+ ("scenario", "timespan", "timeshift", "expected"),
+ ("timeshift > timespan", 1500 milliseconds, 2 seconds, Seq(Seq(0l), Seq(1l, 2l), Seq(3l, 4l))),
+ ("timeshift < timespan", 1500 milliseconds, 1 second, Seq(Seq(0l), Seq(0l, 1l), Seq(1l, 2l), Seq(2l, 3l), Seq(3l, 4l), Seq(4l))),
+ ("timeshift = timespan", 1500 milliseconds, 1500 milliseconds, Seq(Seq(0l), Seq(1l), Seq(2l, 3l), Seq(4l)))
+ )
+ "with duration and timeshift duration should split the values every timespan" in {
+ forAll(data) { (_, timespan, timeshift, expected) => {
+ StepVerifier.withVirtualTime(() => SFlux.interval(1 second).take(5).bufferTimeSpan(timespan)(timeshift))
+ .thenAwait(5 seconds)
+ .expectNext(expected: _*)
+ .verifyComplete()
+ }
+ }
+ }
+ "with other publisher should split the incoming value" in {
+ StepVerifier.withVirtualTime(() => Flux.just(1, 2, 3, 4, 5, 6, 7, 8).delayElements(1 second).buffer(Flux.interval(3 seconds)))
+ .thenAwait(9 seconds)
+ .expectNext(Seq(1, 2), Seq(3, 4, 5), Seq(6, 7, 8))
+ .verifyComplete()
+ }
+ "with other publisher and buffer supplier" in {
+ val buffer = ListBuffer.empty[ListBuffer[Int]]
+ StepVerifier.withVirtualTime(() => Flux.just(1, 2, 3, 4, 5, 6, 7, 8).delayElements(1 second).buffer(Flux.interval(3 seconds), () => {
+ val buff = ListBuffer.empty[Int]
+ buffer += buff
+ buff
+ }))
+ .thenAwait(9 seconds)
+ .expectNext(Seq(1, 2), Seq(3, 4, 5), Seq(6, 7, 8))
+ .verifyComplete()
+ buffer shouldBe Seq(Seq(1, 2), Seq(3, 4, 5), Seq(6, 7, 8))
+ }
+ }
+
+ ".bufferTimeout" - {
+ "with maxSize and duration should split values every duration or after maximum has been reached" in {
+ StepVerifier.withVirtualTime(() => SFlux.interval(1 second).take(5).bufferTimeout(3, 1200 milliseconds))
+ .thenAwait(5 seconds)
+ .expectNext(Seq(0L, 1L), Seq(2L, 3L), Seq(4L))
+ .verifyComplete()
+ }
+ }
+
+ ".bufferUntil" - {
+ "should buffer until predicate expression returns true" in {
+ StepVerifier.withVirtualTime(() => SFlux.interval(1 second).take(5).bufferUntil(l => l % 3 == 0))
+ .thenAwait(5 seconds)
+ .expectNext(Seq(0l), Seq(1l, 2l, 3l), Seq(4l))
+ .verifyComplete()
+ }
+ "with cutBefore should control if the value that trigger the predicate be included in the previous or after sequence" in {
+ StepVerifier.withVirtualTime(() => SFlux.interval(1 second).take(5).bufferUntil(l => l % 3 == 0, cutBefore = true))
+ .thenAwait(5 seconds)
+ .expectNext(Seq(0L, 1L, 2L), Seq(3L, 4L))
+ .verifyComplete()
+ }
+ }
+
+ ".bufferWhen" - {
+ "should buffer with opening and closing publisher" in {
+ StepVerifier.withVirtualTime(() => SFlux.just(1, 2, 3, 4, 5, 6, 7, 8, 9).delayElements(1 second)
+ .bufferWhen(Flux.interval(3 seconds), (_: Long) => SFlux.interval(3 seconds)))
+ .thenAwait(9 seconds)
+ .expectNext(Seq(3, 4, 5), Seq(6, 7, 8), Seq(9))
+ .verifyComplete()
+ }
+ "with buffer supplier should buffer with opening and closing publisher and use the provided supplier" in {
+ val buffer = ListBuffer.empty[ListBuffer[Int]]
+ StepVerifier.withVirtualTime(() => SFlux.just(1, 2, 3, 4, 5, 6, 7, 8, 9).delayElements(1 second)
+ .bufferWhen(SFlux.interval(3 seconds), (_: Long) => SFlux.interval(3 seconds), () => {
+ val buff = ListBuffer.empty[Int]
+ buffer += buff
+ buff
+ }))
+ .thenAwait(9 seconds)
+ .expectNext(Seq(3, 4, 5), Seq(6, 7, 8), Seq(9))
+ .verifyComplete()
+
+ buffer shouldBe Seq(Seq(3, 4, 5), Seq(6, 7, 8), Seq(9))
+ }
+ }
+
+ ".bufferWhile should buffer while the predicate is true" in {
+ StepVerifier.withVirtualTime(() => SFlux.interval(1 second).take(10).bufferWhile(l => l % 2 == 0 || l % 3 == 0))
+ .thenAwait(10 seconds)
+ .expectNext(Seq(0L), Seq(2L, 3L, 4L), Seq(6L), Seq(8L, 9L))
+ .verifyComplete()
+ }
+
+ ".cache" - {
+ "should turn this into a hot source" in {
+ val flux = SFlux.just(1, 2, 3).cache()
+ StepVerifier.create(flux)
+ .expectNext(1, 2, 3)
+ .verifyComplete()
+ StepVerifier.create(flux)
+ .expectNext(1, 2, 3)
+ .verifyComplete()
+ }
+ "with history should just retain up to history" in {
+ val flux = SFlux.just(1, 2, 3).cache(2)
+ StepVerifier.create(flux)
+ .expectNext(1, 2, 3)
+ .verifyComplete()
+ StepVerifier.create(flux)
+ .expectNext(2, 3)
+ .verifyComplete()
+ }
+ "with ttl should retain the cache as long as the provided duration" in {
+ try {
+ StepVerifier.withVirtualTime(() => SFlux.just(1, 2, 3).delayElements(1 second).cache(ttl = 2 seconds))
+ .thenAwait(3 seconds)
+ .expectNext(1, 2, 3)
+ .verifyComplete()
+ } finally {
+ VirtualTimeScheduler.reset()
+ }
+
+ }
+ "with history and ttl should retain the cache up to ttl and max history" in {
+ val supplier = () => {
+ val tested = SFlux.just(1, 2, 3).cache(2, 10 seconds)
+ tested.subscribe()
+ tested
+ }
+ StepVerifier.withVirtualTime(supplier)
+ .thenAwait(5 seconds)
+ .expectNext(2, 3)
+ .verifyComplete()
+ }
+ }
+
+ ".cast should cast the underlying value to a different type" in {
+ val number = SFlux.just(BigDecimal("1"), BigDecimal("2"), BigDecimal("3")).cast[ScalaNumber].blockLast()
+ number.get shouldBe a[ScalaNumber]
+ }
+
+ ".collect should collect the value into the supplied container" in {
+ StepVerifier.create(SFlux.just(1, 2, 3).collect[ListBuffer[Int]](() => ListBuffer.empty, (buffer, v) => buffer += v))
+ .expectNext(ListBuffer(1, 2, 3))
+ .verifyComplete()
+ }
+
+ ".collectList should collect the value into a sequence" in {
+ StepVerifier.create(SFlux.just(1, 2, 3).collectSeq())
+ .expectNext(Seq(1, 2, 3))
+ .verifyComplete()
+ }
+
+ ".collectMap" - {
+ "with keyExtractor should collect the value and extract the key to return as Map" in {
+ StepVerifier.create(SFlux.just(1, 2, 3).collectMap(i => i + 5))
+ .expectNext(Map((6, 1), (7, 2), (8, 3)))
+ .verifyComplete()
+ }
+ "with keyExtractor and valueExtractor should collect the value, extract the key and value from it" in {
+ StepVerifier.create(SFlux.just(1, 2, 3).collectMap(i => i + 5, i => i + 6))
+ .expectNext(Map((6, 7), (7, 8), (8, 9)))
+ .verifyComplete()
+ }
+ "with keyExtractor, valueExtractor and mapSupplier should collect value, extract the key and value from it and put in the provided map" in {
+ val map = mutable.HashMap[Int, Int]()
+ StepVerifier.create(SFlux.just(1, 2, 3).collectMap(i => i + 5, i => i + 6, () => map))
+ .expectNextMatches((m: Map[Int, Int]) => m == Map((6, 7), (7, 8), (8, 9)) && m == map)
+ .verifyComplete()
+ }
+ }
+
+ ".collectMultimap" - {
+ "with keyExtractor should group the value based on the keyExtractor" in {
+ StepVerifier.create(SFlux.just(1, 2, 3, 4, 5, 6, 7, 8, 9, 10).collectMultimap(i => i % 3))
+ .expectNext(Map((0, Seq(3, 6, 9)), (1, Seq(1, 4, 7, 10)), (2, Seq(2, 5, 8))))
+ .verifyComplete()
+ }
+ "with keyExtractor and valueExtractor should collect the value, extract the key and value from it" in {
+ StepVerifier.create(SFlux.just(1, 2, 3, 4, 5, 6, 7, 8, 9, 10).collectMultimap(i => i % 3, i => i + 6))
+ .expectNext(Map((0, Seq(9, 12, 15)), (1, Seq(7, 10, 13, 16)), (2, Seq(8, 11, 14))))
+ .verifyComplete()
+ }
+ "with keyExtractor, valueExtractor and map supplier should collect the value, extract the key and value from it and put in the provided map" in {
+ val map = mutable.HashMap[Int, util.Collection[Int]]()
+ StepVerifier.create(SFlux.just(1, 2, 3, 4, 5, 6, 7, 8, 9, 10).collectMultimap(i => i % 3, i => i + 6, () => map))
+ .expectNextMatches((m: Map[Int, Traversable[Int]]) => {
+ m shouldBe map.mapValues(vs => vs.toArray().toSeq)
+ m shouldBe Map((0, Seq(9, 12, 15)), (1, Seq(7, 10, 13, 16)), (2, Seq(8, 11, 14)))
+ true
+ })
+ .verifyComplete()
+ }
+ }
+
+ ".collectSortedSeq" - {
+ "should collect and sort the elements" in {
+ StepVerifier.create(SFlux.just(5, 2, 3, 1, 4).collectSortedSeq())
+ .expectNext(Seq(1, 2, 3, 4, 5))
+ .verifyComplete()
+ }
+ "with ordering should collect and sort the elements based on the provided ordering" in {
+ StepVerifier.create(SFlux.just(2, 3, 1, 4, 5).collectSortedSeq(new IntOrdering {
+ override def compare(x: Int, y: Int): Int = Ordering.Int.compare(x, y) * -1
+ }))
+ .expectNext(Seq(5, 4, 3, 2, 1))
+ .verifyComplete()
+ }
+ }
+
+ ".compose should defer transformation of this flux to another publisher" in {
+ StepVerifier.create(SFlux.just(1, 2, 3).compose(Mono.from))
+ .expectNext(1)
+ .verifyComplete()
+ }
+
+ ".concatMap" - {
+ "with mapper should map the element sequentially" in {
+ StepVerifier.create(SFlux.just(1, 2, 3).concatMap(i => SFlux.just(i * 2, i * 3)))
+ .expectNext(2, 3, 4, 6, 6, 9)
+ .verifyComplete()
+ }
+ "with mapper and prefetch should map the element sequentially" in {
+ StepVerifier.create(SFlux.just(1, 2, 3).concatMap(i => SFlux.just(i * 2, i * 3), 2))
+ .expectNext(2, 3, 4, 6, 6, 9)
+ .verifyComplete()
+ }
+ }
+
+ ".concatMapDelayError" - {
+ "with mapper, delayUntilEnd and prefetch" in {
+ val flux = SFlux.just(1, 2, 3).concatMapDelayError(i => {
+ if (i == 2) SFlux.raiseError[Int](new RuntimeException("runtime ex"))
+ else SFlux.just(i * 2, i * 3)
+ }, delayUntilEnd = true, 2)
+ StepVerifier.create(flux)
+ .expectNext(2, 3, 6, 9)
+ .expectError(classOf[RuntimeException])
+ .verify()
+ }
+ }
+
+ ".concatMapIterable" - {
+ "with mapper should concat and map an iterable" in {
+ StepVerifier.create(SFlux.just(1, 2, 3).concatMapIterable(i => Iterable(i * 2, i * 3)))
+ .expectNext(2, 3, 4, 6, 6, 9)
+ .verifyComplete()
+ }
+ "with mapper and prefetch should concat and map an iterable" in {
+ StepVerifier.create(SFlux.just(1, 2, 3).concatMapIterable(i => Iterable(i * 2, i * 3), 2))
+ .expectNext(2, 3, 4, 6, 6, 9)
+ .verifyComplete()
+ }
+ }
+
+ ".concatWith should concatenate with another publisher" in {
+ StepVerifier.create(SFlux.just(1, 2, 3).concatWith(SFlux.just(6, 7, 8)))
+ .expectNext(1, 2, 3, 6, 7, 8)
+ .verifyComplete()
+ }
+
+ ".count should return Mono which emit the number of value in this flux" in {
+ StepVerifier.create(SFlux.just(10, 9, 8).count())
+ .expectNext(3)
+ .verifyComplete()
+ }
+
+ ".defaultIfEmpty should use the provided default value if the SFlux is empty" in {
+ StepVerifier.create(SFlux.empty[Int].defaultIfEmpty(-1))
+ .expectNext(-1)
+ .verifyComplete()
+ }
+
+ ".delayElement should delay every elements by provided delay in Duration" in {
+ try {
+ StepVerifier.withVirtualTime(() => SFlux.just(1, 2, 3).delayElements(1 second).elapsed())
+ .thenAwait(3 seconds)
+ .expectNext((1000L, 1), (1000L, 2), (1000L, 3))
+ .verifyComplete()
+ } finally {
+ VirtualTimeScheduler.reset()
+ }
+ }
+
+ ".delaySequence" - {
+ "should delay the element but not subscription" in {
+ StepVerifier.withVirtualTime[(Long, (Long, Int))](() => SFlux.fromPublisher(SFlux.just[Int](1, 2, 3).delayElements(100 milliseconds).elapsed()).delaySequence(1 seconds).elapsed())
+ .thenAwait(1300 milliseconds)
+ .expectNext((1100l, (100l, 1)), (100l, (100l, 2)), (100l, (100l, 3)))
+ .verifyComplete()
+ }
+ "with scheduler should use the scheduler" in {
+ StepVerifier.withVirtualTime[(Long, (Long, Int))](() => SFlux.fromPublisher(SFlux.just[Int](1, 2, 3).delayElements(100 milliseconds).elapsed()).delaySequence(1 seconds, VirtualTimeScheduler.getOrSet()).elapsed())
+ .thenAwait(1300 milliseconds)
+ .expectNext((1100l, (100l, 1)), (100l, (100l, 2)), (100l, (100l, 3)))
+ .verifyComplete()
+ }
+ }
+
+ ".dematerialize should dematerialize the underlying flux" in {
+ StepVerifier.create(SFlux.just(Signal.next(1), Signal.next(2)).dematerialize())
+ .expectNext(1, 2)
+ .verifyComplete
+ }
+
+ ".delaySubscription" - {
+ "with delay duration should delay subscription as long as the provided duration" in {
+ StepVerifier.withVirtualTime(() => SFlux.just(1, 2, 3).delaySubscription(1 hour))
+ .thenAwait(1 hour)
+ .expectNext(1, 2, 3)
+ .verifyComplete()
+ }
+ "with another publisher should delay the current subscription until the other publisher completes" in {
+ StepVerifier.withVirtualTime(() => SFlux.just(1, 2, 3).delaySubscription(SMono.just("one").delaySubscription(1 hour)))
+ .thenAwait(1 hour)
+ .expectNext(1, 2, 3)
+ .verifyComplete()
+
+ }
+ }
+
+ ".distinct" - {
+ "should make the flux distinct" in {
+ StepVerifier.create(SFlux.just(1, 2, 3, 2, 4, 3, 6).distinct())
+ .expectNext(1, 2, 3, 4, 6)
+ .verifyComplete()
+ }
+ "with keySelector should make the flux distinct by using the keySelector" in {
+ StepVerifier.create(SFlux.just(1, 2, 3, 4, 5, 6, 7, 8, 9).distinct(i => i % 3))
+ .expectNext(1, 2, 3)
+ .verifyComplete()
+ }
+ }
+
+ ".distinctUntilChanged" - {
+ "should make the flux always return different subsequent value" in {
+ StepVerifier.create(SFlux.just(1, 2, 2, 3, 3, 3, 3, 2, 2, 5).distinctUntilChanged())
+ .expectNext(1, 2, 3, 2, 5)
+ .verifyComplete()
+ }
+ "with keySelector should make the flux always return different subsequent value based on keySelector" in {
+ StepVerifier.create(SFlux.just(1, 2, 5, 8, 7, 4, 9, 6, 7).distinctUntilChanged(i => i % 3))
+ .expectNext(1, 2, 7, 9, 7)
+ .verifyComplete()
+ }
+ "with keySelector and keyComparator" in {
+ StepVerifier.create(SFlux.just(1, 2, 5, 8, 7, 4, 9, 6, 7).distinctUntilChanged(i => i % 3, (x: Int, y: Int) => x == y))
+ .expectNext(1, 2, 7, 9, 7)
+ .verifyComplete()
+ }
+ }
+
+ ".doAfterTerminate should perform an action after it is terminated" in {
+ val flag = new AtomicBoolean(false)
+ val flux = SFlux.just(1, 2, 3).doAfterTerminate(() => {
+ flag.compareAndSet(false, true)
+ ()
+ })
+ StepVerifier.create(flux)
+ .expectNext(1, 2, 3)
+ .verifyComplete()
+ flag shouldBe 'get
+ }
+
+ ".doOnCancel should perform an action after it is cancelled" in {
+ val atomicBoolean = new AtomicBoolean(false)
+ val flux = SFlux.just(1, 2, 3).delayElements(1 minute)
+ .doOnCancel(() => {
+ atomicBoolean.compareAndSet(false, true) shouldBe true
+ ()
+ })
+
+ val subscriptionReference = new AtomicReference[Subscription]()
+ flux.subscribe(new BaseSubscriber[Int] {
+ override def hookOnSubscribe(subscription: Subscription): Unit = {
+ subscriptionReference.set(subscription)
+ subscription.request(3)
+ }
+
+ override def hookOnNext(value: Int): Unit = ()
+ })
+ subscriptionReference.get().cancel()
+ atomicBoolean shouldBe 'get
+ }
+
+ ".doOnComplete should perform action after the flux is completed" in {
+ val flag = new AtomicBoolean(false)
+ val flux = SFlux.just(1, 2, 3).doOnComplete(() => {
+ flag.compareAndSet(false, true) shouldBe true
+ ()
+ })
+ StepVerifier.create(flux)
+ .expectNext(1, 2, 3)
+ .verifyComplete()
+ flag shouldBe 'get
+ }
+
+ ".doOnEach should perform an action for every signal" in {
+ val buffer = ListBuffer[String]()
+ val flux = SFlux.just(1, 2, 3).doOnEach(s => buffer += s"${s.getType.toString}-${s.get()}")
+ StepVerifier.create(flux)
+ .expectNext(1, 2, 3)
+ .verifyComplete()
+ buffer shouldBe Seq("onNext-1", "onNext-2", "onNext-3", "onComplete-null")
+ }
+
+ ".doOnError" - {
+ "with callback function should call the callback function when the flux encounter error" in {
+ val atomicBoolean = new AtomicBoolean(false)
+ StepVerifier.create(SFlux.raiseError(new RuntimeException())
+ .doOnError(_ => atomicBoolean.compareAndSet(false, true) shouldBe true))
+ .expectError(classOf[RuntimeException])
+ .verify()
+ }
+ "that check exception type should call the callback function when the flux encounter exception with the provided type" in {
+ val atomicBoolean = new AtomicBoolean(false)
+ StepVerifier.create(SFlux.raiseError(new RuntimeException())
+ .doOnError { case _: RuntimeException => atomicBoolean.compareAndSet(false, true) shouldBe true })
+ .expectError(classOf[RuntimeException])
+ }
+ }
+
+ ".doOnNext should call the callback function when the flux emit data successfully" in {
+ val buffer = ListBuffer[Int]()
+ StepVerifier.create(SFlux.just(1, 2, 3)
+ .doOnNext(t => buffer += t))
+ .expectNext(1, 2, 3)
+ .verifyComplete()
+ buffer shouldBe Seq(1, 2, 3)
+ }
+
+ ".doOnRequest should be called upon request" in {
+ val atomicLong = new AtomicLong(0)
+ val flux = SFlux.just[Long](1L)
+ .doOnRequest(l => atomicLong.compareAndSet(0, l))
+ flux.subscribe(new BaseSubscriber[Long] {
+ override def hookOnSubscribe(subscription: Subscription): Unit = {
+ subscription.request(1)
+ ()
+ }
+ })
+ atomicLong.get() shouldBe 1
+ }
+
+ ".doOnSubscribe should be called upon subscribe" in {
+ val atomicBoolean = new AtomicBoolean(false)
+ StepVerifier.create(Flux.just[Long](1L)
+ .doOnSubscribe(_ => atomicBoolean.compareAndSet(false, true)))
+ .expectNextCount(1)
+ .verifyComplete()
+ atomicBoolean shouldBe 'get
+ }
+
+ ".doOnTerminate should do something on terminate" in {
+ val flag = new AtomicBoolean(false)
+ StepVerifier.create(SFlux.just(1, 2, 3).doOnTerminate { () => flag.compareAndSet(false, true) })
+ .expectNext(1, 2, 3)
+ .expectComplete()
+ .verify()
+ flag shouldBe 'get
+ }
+
+ ".doFinally should call the callback" in {
+ val atomicBoolean = new AtomicBoolean(false)
+ StepVerifier.create(SFlux.just(1, 2, 3)
+ .doFinally(_ => atomicBoolean.compareAndSet(false, true) shouldBe true))
+ .expectNext(1, 2, 3)
+ .verifyComplete()
+ atomicBoolean shouldBe 'get
+ }
+
+ ".drop should return Flux that drop a number of elements" in {
+ StepVerifier.create(SFlux.just(1, 2, 3, 4).drop(2))
+ .expectNext(3, 4)
+ .verifyComplete()
+ }
+
+ ".elapsed" - {
+ "should provide the time elapse when this mono emit value" in {
+ StepVerifier.withVirtualTime(() => SFlux.just(1, 2, 3).delaySubscription(1 second).delayElements(1 second).elapsed(), 3)
+ .thenAwait(4 seconds)
+ .expectNextMatches(new Predicate[(Long, Int)] {
+ override def test(t: (Long, Int)): Boolean = t match {
+ case (time, data) => time >= 1000 && data == 1
+ }
+ })
+ .expectNextMatches(new Predicate[(Long, Int)] {
+ override def test(t: (Long, Int)): Boolean = t match {
+ case (time, data) => time >= 1000 && data == 2
+ }
+ })
+ .expectNextMatches(new Predicate[(Long, Int)] {
+ override def test(t: (Long, Int)): Boolean = t match {
+ case (time, data) => time >= 1000 && data == 3
+ }
+ })
+ .verifyComplete()
+ }
+ "with Scheduler should provide the time elapsed using the provided scheduler when this mono emit value" in {
+ val virtualTimeScheduler = VirtualTimeScheduler.getOrSet()
+ StepVerifier.withVirtualTime(() => SFlux.just(1, 2, 3)
+ .delaySubscription(1 second, virtualTimeScheduler)
+ .delayElements(1 second, virtualTimeScheduler)
+ .elapsed(virtualTimeScheduler), 3)
+ .`then`(() => virtualTimeScheduler.advanceTimeBy(4 seconds))
+ .expectNextMatches(new Predicate[(Long, Int)] {
+ override def test(t: (Long, Int)): Boolean = t match {
+ case (time, data) => time >= 1000 && data == 1
+ }
+ })
+ .expectNextMatches(new Predicate[(Long, Int)] {
+ override def test(t: (Long, Int)): Boolean = t match {
+ case (time, data) => time >= 1000 && data == 2
+ }
+ })
+ .expectNextMatches(new Predicate[(Long, Int)] {
+ override def test(t: (Long, Int)): Boolean = t match {
+ case (time, data) => time >= 1000 && data == 3
+ }
+ })
+ .verifyComplete()
+ }
+ }
+
+ ".elementAt" - {
+ "should emit only the element at given index position" in {
+ StepVerifier.create(SFlux.just(1, 2, 3).elementAt(2))
+ .expectNext(3)
+ .verifyComplete()
+ }
+ "should emit only the element at given index position or default value if the sequence is shorter" in {
+ StepVerifier.create(SFlux.just(1, 2, 3, 4).elementAt(10, Option(-1)))
+ .expectNext(-1)
+ .verifyComplete()
+ }
+ }
+
+ ".expandDeep" - {
+ "should expand the flux" in {
+ StepVerifier.create(SFlux.just("a", "b").expandDeep(s => SFlux.just(s"$s$s", s"$s$s$s")).take(5))
+ .expectNext("a", "aa", "aaaa", "aaaaaaaa", "aaaaaaaaaaaaaaaa")
+ .verifyComplete()
+ }
+ " with capacity hint should expand the flux" in {
+ StepVerifier.create(SFlux.just("a", "b").expandDeep(s => SFlux.just(s"$s$s", s"$s$s$s"), 10).take(5))
+ .expectNext("a", "aa", "aaaa", "aaaaaaaa", "aaaaaaaaaaaaaaaa")
+ .verifyComplete()
+ }
+ }
+
+ ".expand" - {
+ "should expand the flux" in {
+ StepVerifier.create(SFlux.just("a", "b").expand(s => SFlux.just(s"$s$s", s"$s$s$s")).take(10))
+ .expectNext("a", "b", "aa", "aaa", "bb", "bbb", "aaaa", "aaaaaa", "aaaaaa", "aaaaaaaaa")
+ .verifyComplete()
+ }
+ " with capacity hint should expand the flux" in {
+ StepVerifier.create(SFlux.just("a", "b").expand(s => SFlux.just(s"$s$s", s"$s$s$s"), 5).take(10))
+ .expectNext("a", "b", "aa", "aaa", "bb", "bbb", "aaaa", "aaaaaa", "aaaaaa", "aaaaaaaaa")
+ .verifyComplete()
+ }
+ }
+
+ ".filter should evaluate each value against given predicate" in {
+ StepVerifier.create(SFlux.just(1, 2, 3).filter(i => i > 1))
+ .expectNext(2, 3)
+ .verifyComplete()
+ }
+
+ ".filterWhen" - {
+ "should replay the value of mono if the first item emitted by the test is true" in {
+ StepVerifier.create(SFlux.just(10, 20, 30).filterWhen((i: Int) => SMono.just(i % 2 == 0)))
+ .expectNext(10, 20, 30)
+ .verifyComplete()
+ }
+ "with bufferSize should replay the value of mono if the first item emitted by the test is true" in {
+ StepVerifier.create(SFlux.just(10, 20, 30).filterWhen((i: Int) => SMono.just(i % 2 == 0), 1))
+ .expectNext(10, 20, 30)
+ .verifyComplete()
+ }
+ }
+
+ ".flatMap should transform signal emitted by this flux into publishers" in {
+ StepVerifier.create(SFlux.just(1, 2, 3).flatMap(_ => SMono.just("next"), _ => SMono.just("error"), () => SMono.just("complete")))
+ .expectNext("next", "next", "next", "complete")
+ .verifyComplete()
+ }
+
+ ".flatMapIterable" - {
+ "should transform the items emitted by this flux into iterable" in {
+ StepVerifier.create(SFlux.just(1, 2, 3).flatMapIterable(i => Iterable(i * 2, i * 3)))
+ .expectNext(2, 3, 4, 6, 6, 9)
+ .verifyComplete()
+ }
+ "with prefetch should transform the items and prefetch" in {
+ StepVerifier.create(SFlux.just(1, 2, 3).flatMapIterable(i => Iterable(i * 2, i * 3), 2))
+ .expectNext(2, 3, 4, 6, 6, 9)
+ .verifyComplete()
+ }
+ }
+
+ ".flatMapSequential" - {
+ "should transform items emitted by this flux into publisher then flatten them, in order" in {
+ StepVerifier.create(SFlux.just(1, 2, 3).flatMapSequential(i => SFlux.just(i * 2, i * 3)))
+ .expectNext(2, 3, 4, 6, 6, 9)
+ .verifyComplete()
+ }
+ "with maxConcurrency, should do the same as before just with provided maxConcurrency" in {
+ StepVerifier.create(SFlux.just(1, 2, 3).flatMapSequential(i => SFlux.just(i * 2, i * 3), 2))
+ .expectNext(2, 3, 4, 6, 6, 9)
+ .verifyComplete()
+ }
+ "with maxConcurrency and prefetch, should do the same as before just with provided maxConcurrency and prefetch" in {
+ StepVerifier.create(SFlux.just(1, 2, 3).flatMapSequential(i => SFlux.just(i * 2, i * 3), 2, 2))
+ .expectNext(2, 3, 4, 6, 6, 9)
+ .verifyComplete()
+ }
+ "with delayError should respect whether error be delayed after current merge backlog" in {
+ StepVerifier.create(SFlux.just(1, 2, 3).flatMapSequential(i => {
+ if (i == 2) Flux.error[Int](new RuntimeException("just an error"))
+ else Flux.just(i * 2, i * 3)
+ }, 2, 2, delayError = true))
+ .expectNext(2, 3, 6, 9)
+ .verifyError(classOf[RuntimeException])
+ }
+ }
+
+ ".flatten" - {
+ "with mapper should map the element sequentially" in {
+ StepVerifier.create(SFlux.just(1, 2, 3).map(i => SFlux.just(i * 2, i * 3)).flatten)
+ .expectNext(2, 3, 4, 6, 6, 9)
+ .verifyComplete()
+ }
+ }
+
+ ".foldLeft should apply a binary operator to an initial value and all element of the source" in {
+ val mono = SFlux.just(1, 2, 3).foldLeft(0)((acc: Int, el: Int) => acc + el)
+ StepVerifier.create(mono)
+ .expectNext(6)
+ .verifyComplete()
+ }
+
+ ".groupBy" - {
+ "with keyMapper should group the flux by the key mapper" in {
+ val oddBuffer = ListBuffer.empty[Int]
+ val evenBuffer = ListBuffer.empty[Int]
+
+ StepVerifier.create(SFlux.just(1, 2, 3, 4, 5, 6).groupBy {
+ case even: Int if even % 2 == 0 => "even"
+ case _: Int => "odd"
+ })
+ .expectNextMatches(new Predicate[GroupedFlux[String, Int]] {
+ override def test(t: GroupedFlux[String, Int]): Boolean = {
+ t.subscribe(oddBuffer += _)
+ t.key() == "odd"
+ }
+ })
+ .expectNextMatches(new Predicate[GroupedFlux[String, Int]] {
+ override def test(t: GroupedFlux[String, Int]): Boolean = {
+ t.subscribe(evenBuffer += _)
+ t.key() == "even"
+ }
+ })
+ .verifyComplete()
+
+ oddBuffer shouldBe Seq(1, 3, 5)
+ evenBuffer shouldBe Seq(2, 4, 6)
+ }
+ "with keyMapper and prefetch should group the flux by the key mapper and prefetch the elements from the source" in {
+ val oddBuffer = ListBuffer.empty[Int]
+ val evenBuffer = ListBuffer.empty[Int]
+
+ StepVerifier.create(SFlux.just(1, 2, 3, 4, 5, 6).groupBy({
+ case even: Int if even % 2 == 0 => "even"
+ case _: Int => "odd"
+ }: Int => String, identity, 6))
+ .expectNextMatches(new Predicate[GroupedFlux[String, Int]] {
+ override def test(t: GroupedFlux[String, Int]): Boolean = {
+ t.subscribe(oddBuffer += _)
+ t.key() == "odd"
+ }
+ })
+ .expectNextMatches(new Predicate[GroupedFlux[String, Int]] {
+ override def test(t: GroupedFlux[String, Int]): Boolean = {
+ t.subscribe(evenBuffer += _)
+ t.key() == "even"
+ }
+ })
+ .verifyComplete()
+
+ oddBuffer shouldBe Seq(1, 3, 5)
+ evenBuffer shouldBe Seq(2, 4, 6)
+ }
+
+ "with keyMapper and valueMapper should group the flux by the key mapper and convert the value by value mapper" in {
+ val oddBuffer = ListBuffer.empty[String]
+ val evenBuffer = ListBuffer.empty[String]
+
+ StepVerifier.create(SFlux.just(1, 2, 3, 4, 5, 6).groupBy[String, String]({
+ case even: Int if even % 2 == 0 => "even"
+ case _: Int => "odd"
+ }: Int => String, (i => i.toString): Int => String))
+ .expectNextMatches(new Predicate[GroupedFlux[String, String]] {
+ override def test(t: GroupedFlux[String, String]): Boolean = {
+ t.subscribe(oddBuffer += _)
+ t.key() == "odd"
+ }
+ })
+ .expectNextMatches(new Predicate[GroupedFlux[String, String]] {
+ override def test(t: GroupedFlux[String, String]): Boolean = {
+ t.subscribe(evenBuffer += _)
+ t.key() == "even"
+ }
+ })
+ .verifyComplete()
+
+ oddBuffer shouldBe Seq("1", "3", "5")
+ evenBuffer shouldBe Seq("2", "4", "6")
+ }
+
+ "with keyMapper, valueMapper and prefetch should do the above with prefetch" in {
+ val oddBuffer = ListBuffer.empty[String]
+ val evenBuffer = ListBuffer.empty[String]
+
+ StepVerifier.create(SFlux.just(1, 2, 3, 4, 5, 6).groupBy[String, String]({
+ case even: Int if even % 2 == 0 => "even"
+ case _: Int => "odd"
+ }: Int => String, (i => i.toString): Int => String, 6))
+ .expectNextMatches(new Predicate[GroupedFlux[String, String]] {
+ override def test(t: GroupedFlux[String, String]): Boolean = {
+ t.subscribe(oddBuffer += _)
+ t.key() == "odd"
+ }
+ })
+ .expectNextMatches(new Predicate[GroupedFlux[String, String]] {
+ override def test(t: GroupedFlux[String, String]): Boolean = {
+ t.subscribe(evenBuffer += _)
+ t.key() == "even"
+ }
+ })
+ .verifyComplete()
+
+ oddBuffer shouldBe Seq("1", "3", "5")
+ evenBuffer shouldBe Seq("2", "4", "6")
+ }
+ }
+
+ ".handle should handle the values" in {
+ val buffer = ListBuffer.empty[Int]
+ val flux = SFlux.just(1, 2, 3, 4, 5, 6).handle[Seq[Int]] {
+ case (v, sink) =>
+ buffer += v
+ if (v == 6) {
+ sink.next(buffer)
+ sink.complete()
+ }
+ }
+ val expected = Seq(1, 2, 3, 4, 5, 6)
+ StepVerifier.create(flux)
+ .expectNext(expected)
+ .verifyComplete()
+ buffer shouldBe expected
+ }
+
+ ".hasElement should return true if the flux has element matched" in {
+ StepVerifier.create(SFlux.just(1, 2, 3, 4, 5).hasElement(4))
+ .expectNext(true)
+ .verifyComplete()
+ }
+
+ ".hasElements should return true if this flux has at least one element" in {
+ StepVerifier.create(SFlux.just(1, 2, 3).hasElements)
+ .expectNext(true)
+ .verifyComplete()
+ }
+
+ ".head should return Mono that emit the first value of Flux" in {
+ StepVerifier.create(SFlux.just(1, 2, 3).head)
+ .expectNext(1)
+ .verifyComplete()
+ }
+
+ ".ignoreElements should ignore all elements and only reacts on termination" in {
+ StepVerifier.create(SFlux.just(1, 2, 3).ignoreElements())
+ .verifyComplete()
+ }
+
+ ".last" - {
+ "should give last element" in {
+ StepVerifier.create(SFlux.just(1, 2, 3).last())
+ .expectNext(3)
+ .verifyComplete()
+ }
+ "with defaultValue should give the last element or defaultValue if the flux is empty" in {
+ StepVerifier.create(SFlux.empty[Int].last(Option(5)))
+ .expectNext(5)
+ .verifyComplete()
+ }
+ }
+
+ ".map should map the type of Flux from T to R" in {
+ StepVerifier.create(SFlux.just(1, 2, 3).map(_.toString))
+ .expectNext("1", "2", "3")
+ .expectComplete()
+ .verify()
+ }
+
+ ".materialize should convert the flux into a flux that emit its signal" in {
+ StepVerifier.create(SFlux.just(1, 2, 3).materialize())
+ .expectNext(Signal.next(1), Signal.next(2), Signal.next(3), Signal.complete[Int]())
+ .verifyComplete()
+ }
+
+ ".max" - {
+ "of numbers should emit the highest value of ordering" in {
+ StepVerifier.create(SFlux.just(4, 3, 6, 5, 8, 7).max)
+ .expectNext(Option(8))
+ .verifyComplete()
+ }
+ "of strings should emit the highest value of ordering" in {
+ StepVerifier.create(SFlux.just("d", "c", "g", "j", "i").max)
+ .expectNext(Option("j"))
+ .verifyComplete()
+ }
+ }
+
+ ".mergeWith should merge with the provided publisher so they may interleave" in {
+ StepVerifier.withVirtualTime(() => SFlux.just(1, 3, 5).delayElements(1 second)
+ .mergeWith(SFlux.just(2, 4, 6).delayElements(1 second).delaySubscription(500 milliseconds)))
+ .thenAwait(7 seconds)
+ .expectNext(1, 2, 3, 4, 5, 6)
+ .verifyComplete()
+ }
+
+ ".min" - {
+ "of numbers should emit the lowest value of ordering" in {
+ StepVerifier.create(SFlux.just(4, 3, 6, 5, 8).min)
+ .expectNext(Option(3))
+ .verifyComplete()
+ }
+ "of strings should emit the lowest value of ordering" in {
+ StepVerifier.create(SFlux.just("d", "c", "g", "j").min)
+ .expectNext(Option("c"))
+ .verifyComplete()
+ }
+ }
+
+ ".name should call the underlying Flux.name method" in {
+ val name = "one two three four"
+ val flux = SFlux.just(1, 2, 3, 4).name(name)
+ val scannable: Scannable = Scannable.from(Option(flux))
+ scannable.name shouldBe name
+ }
+
+ ".next should emit only the first item" in {
+ StepVerifier.create(SFlux.just(1, 2, 3).next())
+ .expectNext(1)
+ .verifyComplete()
+ }
+
+ ".nonEmpty should return true if this flux has at least one element" in {
+ StepVerifier.create(SFlux.just(1, 2, 3).nonEmpty)
+ .expectNext(true)
+ .verifyComplete()
+ }
+
+ ".ofType should filter the value emitted by this flux according to the class" in {
+ StepVerifier.create(SFlux.just(1, "2", "3", 4).ofType[String])
+ .expectNext("2", "3")
+ .verifyComplete()
+ }
+
+ ".onBackpressureBuffer" - {
+ "should call the underlying method" in {
+ val jFlux = spy(JFlux.just(1, 2, 3))
+ val flux = SFlux.fromPublisher(jFlux)
+ flux.onBackpressureBuffer()
+ verify(jFlux).onBackpressureBuffer()
+ }
+ "with maxSize should call the underlying method" in {
+ val jFlux = spy(JFlux.just(1, 2, 3))
+ val flux = SFlux.fromPublisher(jFlux)
+ flux.onBackpressureBuffer(5)
+ verify(jFlux).onBackpressureBuffer(5)
+ }
+ "with maxSize and onOverflow handler" in {
+ val jFlux = spy(JFlux.just(1, 2, 3))
+ val flux = SFlux.fromPublisher(jFlux)
+ flux.onBackpressureBuffer(5, _ => ())
+ verify(jFlux).onBackpressureBuffer(ArgumentMatchers.eq(5), ArgumentMatchers.any(classOf[Consumer[Int]]))
+ }
+ "with maxSize and overflow strategy" in {
+ val jFlux = spy(JFlux.just(1, 2, 3))
+ val flux = SFlux.fromPublisher(jFlux)
+ flux.onBackpressureBuffer(5, DROP_LATEST)
+ verify(jFlux).onBackpressureBuffer(5, DROP_LATEST)
+ }
+ "with maxSize, overflow handler and overflow strategy" in {
+ val jFlux = spy(JFlux.just(1, 2, 3))
+ val flux = SFlux.fromPublisher(jFlux)
+ flux.onBackpressureBuffer(5, _ => (), DROP_LATEST)
+ verify(jFlux).onBackpressureBuffer(ArgumentMatchers.eq(5), ArgumentMatchers.any(classOf[Consumer[Int]]), ArgumentMatchers.eq(DROP_LATEST))
+ }
+ }
+
+ ".onBackpressureDrop" - {
+ val jFlux = spy(JFlux.just(1, 2, 3))
+ val flux = SFlux.fromPublisher(jFlux)
+ "without consumer" in {
+ flux.onBackpressureDrop()
+ verify(jFlux).onBackpressureDrop()
+ }
+ "with consumer" in {
+ flux.onBackpressureDrop(_ => ())
+ verify(jFlux).onBackpressureDrop(ArgumentMatchers.any(classOf[Consumer[Int]]))
+ }
+ }
+
+ ".onBackpressureError" in {
+ val jFlux = spy(JFlux.just(1, 2, 3))
+ val flux = SFlux.fromPublisher(jFlux)
+ flux.onBackpressureError()
+ verify(jFlux).onBackpressureError()
+ }
+
+ ".onBackpressureLatest" in {
+ val jFlux = spy(JFlux.just(1, 2, 3))
+ val flux = SFlux.fromPublisher(jFlux)
+ flux.onBackpressureLatest()
+ verify(jFlux).onBackpressureLatest()
+ }
+
+ ".onErrorMap" - {
+ "with mapper should map the error" in {
+ StepVerifier.create(SFlux.raiseError[Int](new RuntimeException("runtime exception"))
+ .onErrorMap((t: Throwable) => new UnsupportedOperationException(t.getMessage)))
+ .expectError(classOf[UnsupportedOperationException])
+ .verify()
+ }
+
+ "with type and mapper should map the error if the error is of the provided type" in {
+ StepVerifier.create(SFlux.raiseError[Int](new RuntimeException("runtime ex"))
+ .onErrorMap { throwable: Throwable =>
+ throwable match {
+ case t: RuntimeException => new UnsupportedOperationException(t.getMessage)
+ }
+ })
+ .expectError(classOf[UnsupportedOperationException])
+ .verify()
+ }
+ }
+
+ ".onErrorRecover" - {
+ "should recover with a Flux of element that has been recovered" in {
+ val convoy = SFlux.just[Vehicle](Sedan(1), Sedan(2)).concatWith(SFlux.raiseError(new RuntimeException("oops")))
+ .onErrorRecover { case _ => Truck(5) }
+ StepVerifier.create(convoy)
+ .expectNext(Sedan(1), Sedan(2), Truck(5))
+ .verifyComplete()
+ }
+ }
+
+ ".onErrorRecoverWith" - {
+ "should recover with a Flux of element that is provided for recovery" in {
+ val convoy = SFlux.just[Vehicle](Sedan(1), Sedan(2)).concatWith(SFlux.raiseError(new RuntimeException("oops")))
+ .onErrorRecoverWith { case _ => SFlux.just(Truck(5)) }
+ StepVerifier.create(convoy)
+ .expectNext(Sedan(1), Sedan(2), Truck(5))
+ .verifyComplete()
+ }
+ }
+
+ ".onErrorResume" - {
+ "should resume with a fallback publisher when error happen" in {
+ StepVerifier.create(SFlux.just(1, 2).concatWith(SMono.raiseError(new RuntimeException("exception"))).onErrorResume((_: Throwable) => SFlux.just(10, 20, 30)))
+ .expectNext(1, 2, 10, 20, 30)
+ .verifyComplete()
+ }
+ }
+
+ ".onErrorReturn" - {
+ "should return the fallback value if error happen" in {
+ StepVerifier.create(SFlux.just(1, 2).concatWith(SMono.raiseError(new RuntimeException("exc"))).onErrorReturn(10))
+ .expectNext(1, 2, 10)
+ .verifyComplete()
+ }
+ "with predicate and fallbackValue should return the fallback value if the predicate is true" in {
+ val predicate = (_: Throwable).isInstanceOf[RuntimeException]
+ StepVerifier.create(SFlux.just(1, 2).concatWith(SMono.raiseError(new RuntimeException("exc")))
+ .onErrorReturn(10, predicate))
+ .expectNext(1, 2, 10)
+ .verifyComplete()
+ }
+ }
+
+ ".or should emit from the fastest first sequence" in {
+ StepVerifier.create(SFlux.just(10, 20, 30).or(SFlux.just(1, 2, 3).delayElements(1 second)))
+ .expectNext(10, 20, 30)
+ .verifyComplete()
+ }
+
+ ".publishNext should make this flux a hot mono" in {
+ StepVerifier.create(SFlux.just(1, 2, 3).publishNext())
+ .expectNext(1)
+ .verifyComplete()
+ }
+
+ ".reduce" - {
+ "should aggregate the values" in {
+ StepVerifier.create(SFlux.just(1, 2, 3).reduce(_ + _))
+ .expectNext(6)
+ .verifyComplete()
+ }
+ "with initial value should aggregate the values with initial one" in {
+ StepVerifier.create(SFlux.just(1, 2, 3).reduce[String]("0", (agg, v) => s"$agg-${v.toString}"))
+ .expectNext("0-1-2-3")
+ .verifyComplete()
+ }
+ }
+
+ ".reduceWith should aggregate the values with initial one" in {
+ StepVerifier.create(SFlux.just(1, 2, 3).reduceWith[String](() => "0", (agg, v) => s"$agg-${v.toString}"))
+ .expectNext("0-1-2-3")
+ .verifyComplete()
+ }
+
+ ".repeat" - {
+ "with predicate should repeat the subscription if the predicate returns true" in {
+ val counter = new AtomicInteger(0)
+ StepVerifier.create(SFlux.just(1, 2, 3).repeat(predicate = () => {
+ if (counter.getAndIncrement() == 0) true
+ else false
+ }))
+ .expectNext(1, 2, 3, 1, 2, 3)
+ .verifyComplete()
+ }
+ "with numRepeat should repeat as many as the provided parameter" in {
+ StepVerifier.create(SFlux.just(1, 2, 3).repeat(3))
+ .expectNext(1, 2, 3)
+ .expectNext(1, 2, 3)
+ .expectNext(1, 2, 3)
+ .expectNext(1, 2, 3)
+ .verifyComplete()
+ }
+ "with numRepeat and predicate should repeat as many as provided parameter and as long as the predicate returns true" in {
+ val flux = SFlux.just(1, 2, 3).repeat(3, () => true)
+ StepVerifier.create(flux)
+ .expectNext(1, 2, 3)
+ .expectNext(1, 2, 3)
+ .expectNext(1, 2, 3)
+ .expectNext(1, 2, 3)
+ .verifyComplete()
+ }
+ }
+
+ ".retry" - {
+ "with numRetries will retry a number of times according to provided parameter" in {
+ StepVerifier.create(SFlux.just(1, 2, 3).concatWith(SMono.raiseError(new RuntimeException("ex"))).retry(3))
+ .expectNext(1, 2, 3)
+ .expectNext(1, 2, 3)
+ .expectNext(1, 2, 3)
+ .expectNext(1, 2, 3)
+ .expectError(classOf[RuntimeException])
+ .verify()
+ }
+ "with predicate will retry until the predicate returns false" in {
+ val counter = new AtomicInteger(0)
+ StepVerifier.create(SFlux.just(1, 2, 3).concatWith(SMono.raiseError(new RuntimeException("ex"))).retry(retryMatcher = (_: Throwable) =>
+ if (counter.getAndIncrement() > 0) false
+ else true
+ ))
+ .expectNext(1, 2, 3)
+ .expectNext(1, 2, 3)
+ .expectError(classOf[RuntimeException])
+ .verify()
+ }
+ "with numRetries and predicate should retry as many as provided numRetries and predicate returns true" in {
+ val counter = new AtomicInteger(0)
+ val flux = SFlux.just(1, 2, 3).concatWith(SMono.raiseError(new RuntimeException("ex"))).retry(3, { _ =>
+ if (counter.getAndIncrement() > 5) false
+ else true
+ })
+ StepVerifier.create(flux)
+ .expectNext(1, 2, 3)
+ .expectNext(1, 2, 3)
+ .expectNext(1, 2, 3)
+ .expectNext(1, 2, 3)
+ .expectError(classOf[RuntimeException])
+ .verify()
+ }
+ }
+
+ ".retryWhen should retry the companion publisher produces onNext signal" in {
+ val counter = new AtomicInteger(0)
+ val flux = SFlux.just(1, 2, 3).concatWith(SMono.raiseError(new RuntimeException("ex"))).retryWhen { _ =>
+ if (counter.getAndIncrement() > 0) SMono.raiseError[Int](new RuntimeException("another ex"))
+ else SMono.just(1)
+ }
+ StepVerifier.create(flux)
+ .expectNext(1, 2, 3)
+ .expectNext(1, 2, 3)
+ .verifyComplete()
+ }
+
+ ".sample should emit the last value for given interval" in {
+ StepVerifier.withVirtualTime(() => SFlux.just(1, 2, 3, 4, 5, 6).delayElements(1 second).sample(1500 milliseconds))
+ .thenAwait(6 seconds)
+ .expectNext(1, 2, 4, 5, 6)
+ .verifyComplete()
+ }
+
+ ".sampleFirst should emit the first value during the timespan" in {
+ StepVerifier.withVirtualTime(() => SFlux.just(1, 2, 3, 4, 5).delayElements(1 second).sampleFirst(1500 milliseconds))
+ .thenAwait(6 seconds)
+ .expectNext(1, 3, 5)
+ .verifyComplete()
+ }
+
+ ".scan" - {
+ "should scan the values of this flux" in {
+ StepVerifier.create(SFlux.just(1, 2, 3, 4).scan { (a, b) => a * b })
+ .expectNext(1, 2, 6, 24)
+ .verifyComplete()
+ }
+ "with initial value should scan with provided initial value" in {
+ val flux = SFlux.just[Int](1, 2, 3, 4).scan[Int](2, { (a: Int, b: Int) => a * b })
+ StepVerifier.create(flux)
+ .expectNext(2, 2, 4, 12, 48)
+ .verifyComplete()
+ }
+ }
+
+ ".scanWith should scan with initial value" in {
+ StepVerifier.create(SFlux.just(1, 2, 3, 4).scanWith[Int](() => 2, { (a, b) => a * b }))
+ .expectNext(2, 2, 4, 12, 48)
+ .verifyComplete()
+ }
+
+ ".single" - {
+ "should return a mono" in {
+ StepVerifier.create(SFlux.just(1).single())
+ .expectNext(1)
+ .verifyComplete()
+ }
+ "or emit onError with IndexOutOfBoundsException" in {
+ StepVerifier.create(SFlux.just(1, 2, 3).single())
+ .expectError(classOf[IndexOutOfBoundsException])
+ .verify()
+ }
+ "with default value should return the default value if the flux is empty" in {
+ StepVerifier.create(SFlux.empty[Int].single(Option(2)))
+ .expectNext(2)
+ .verifyComplete()
+ }
+ }
+
+ ".singleOrEmpty should return mono with single value or empty" in {
+ StepVerifier.create(SFlux.just(3).singleOrEmpty())
+ .expectNext(3)
+ .verifyComplete()
+ }
+
+ ".skip" - {
+ "with the number to skip should skip some elements" in {
+ StepVerifier.create(SFlux.just(1, 2, 3, 4, 5).skip(2))
+ .expectNext(3, 4, 5)
+ .verifyComplete()
+ }
+ "with duration should skip all elements within that duration" in {
+ StepVerifier.withVirtualTime(() => SFlux.just(1, 2, 3, 4, 5).delayElements(1 second).skip(2 seconds))
+ .thenAwait(6 seconds)
+ .expectNext(2, 3, 4, 5)
+ .verifyComplete()
+ }
+ "with timer should skip all elements within the millis duration with the provided timer" in {
+ StepVerifier.withVirtualTime(() => SFlux.just(1, 2, 3, 4, 5).delayElements(1 second).skip(2 seconds, Schedulers.single()))
+ .thenAwait(6 seconds)
+ .expectNext(2, 3, 4, 5)
+ .verifyComplete()
+ }
+ }
+
+ ".skipLast should skip the last n elements" in {
+ StepVerifier.create(SFlux.just(1, 2, 3, 4, 5).skipLast(2))
+ .expectNext(1, 2, 3)
+ .verifyComplete()
+ }
+
+ ".skipUntil should skip until predicate matches" in {
+ StepVerifier.create(SFlux.just(1, 2, 3, 4, 5).skipUntil(t => t == 3))
+ .expectNext(3, 4, 5)
+ .verifyComplete()
+ }
+
+ ".skipWhile should skip while the predicate returns true" in {
+ StepVerifier.create(SFlux.just(1, 2, 3, 4, 5).skipWhile(t => t <= 3))
+ .expectNext(4, 5)
+ .verifyComplete()
+ }
+
+ ".sort" - {
+ "should sort the elements" in {
+ StepVerifier.create(SFlux.just(3, 4, 2, 5, 1, 6).sort())
+ .expectNext(1, 2, 3, 4, 5, 6)
+ .verifyComplete()
+ }
+ "with sort function should sort the elements based on the function" in {
+ StepVerifier.create(SFlux.just(3, 4, 2, 5, 1, 6).sort(new IntOrdering() {
+ override def compare(x: Int, y: Int): Int = super.compare(x, y)
+ }.reverse))
+ .expectNext(6, 5, 4, 3, 2, 1)
+ .verifyComplete()
+ }
+ }
+
+ ".startWith" - {
+ "with iterable should prepend the flux with the provided iterable elements" in {
+ StepVerifier.create(SFlux.just(1, 2, 3).startWith(Iterable(10, 20, 30)))
+ .expectNext(10, 20, 30, 1, 2, 3)
+ .verifyComplete()
+ }
+ "with varargs should prepend the flux with the provided values" in {
+ StepVerifier.create(SFlux.just(1, 2, 3).startWith(10, 20, 30))
+ .expectNext(10, 20, 30, 1, 2, 3)
+ .verifyComplete()
+ }
+ "with publisher should prepend the flux with the provided publisher" in {
+ StepVerifier.create(SFlux.just(1, 2, 3).startWith(Flux.just(10, 20, 30)))
+ .expectNext(10, 20, 30, 1, 2, 3)
+ .verifyComplete()
+ }
+ }
+
+ ".sum should sum up all values at onComplete it emits the total, given the source that emit numeric values" in {
+ StepVerifier.create(SFlux.just(1, 2, 3, 4, 5).sum)
+ .expectNext(15)
+ .verifyComplete()
+ }
+
+ ".switchIfEmpty should switch if the current flux is empty" in {
+ StepVerifier.create(SFlux.empty[Int].switchIfEmpty(SFlux.just[Int](10, 20, 30)))
+ .expectNext(10, 20, 30)
+ .verifyComplete()
+ }
+
+ ".switchMap" - {
+ "with function should switch to the new publisher" in {
+ StepVerifier.create(SFlux.just(1, 2, 3).switchMap(i => Flux.just(i * 10, i * 20)))
+ .expectNext(10, 20, 20, 40, 30, 60)
+ .verifyComplete()
+ }
+ "with function and prefetch should switch to the new publisher" in {
+ StepVerifier.create(SFlux.just(1, 2, 3).switchMap(i => SFlux.just(i * 10, i * 20), 2))
+ .expectNext(10, 20, 20, 40, 30, 60)
+ .verifyComplete()
+ }
+ }
+
+ ".tag should tag the Flux and accessible from Scannable" in {
+ val flux = SFlux.just(1, 2, 3).tag("integer", "one, two, three")
+ Scannable.from(Option(flux)).tags shouldBe Stream("integer" -> "one, two, three")
+ }
+
+ ".tail should return flux that exclude the head" in {
+ StepVerifier.create(SFlux.just(1, 2, 3, 4, 5).tail)
+ .expectNext(2, 3, 4, 5)
+ .verifyComplete()
+ }
+
+ ".take" - {
+ "should emit only n values" in {
+ StepVerifier.create(SFlux(1, 2, 3, 4, 5, 6, 7, 8, 9, 10).take(3))
+ .expectNext(1, 2, 3)
+ .verifyComplete()
+ }
+ "with duration should only emit values during the provided duration" in {
+ StepVerifier.withVirtualTime(() => SFlux.just(1, 2, 3, 4, 5).delayElements(1 seconds).take(3500 milliseconds))
+ .thenAwait(5 seconds)
+ .expectNext(1, 2, 3)
+ .verifyComplete()
+ }
+ "with timespan and timed scheduler should only emit values during the provided timespan with the provided TimedScheduler" in {
+ val vts = VirtualTimeScheduler.getOrSet()
+ StepVerifier.create(SFlux.just(1, 2, 3, 4, 5)
+ .delayElements(1 second, vts)
+ .take(3500 milliseconds, vts), 256)
+ .`then`(() => vts.advanceTimeBy(5 seconds))
+ .expectNext(1, 2, 3)
+ .verifyComplete()
+ }
+ }
+
+ ".takeLast should take the last n values" in {
+ StepVerifier.create(SFlux.just(1, 2, 3, 4, 5).takeLast(3))
+ .expectNext(3, 4, 5)
+ .verifyComplete()
+ }
+
+ ".takeUntil should emit the values until the predicate returns true" in {
+ StepVerifier.create(SFlux.just(1, 2, 3, 4, 5).takeUntil(t => t >= 4))
+ .expectNext(1, 2, 3, 4)
+ .verifyComplete()
+ }
+
+ ".takeWhile should emit values until the predicate returns false" in {
+ StepVerifier.create(SFlux.just(1, 2, 3, 4, 5).takeWhile(t => t < 4))
+ .expectNext(1, 2, 3)
+ .verifyComplete()
+ }
+
+ ".then" - {
+ "without parameter should actively ignore the values" in {
+ StepVerifier.create(SFlux.just(1, 2, 3, 4, 5).`then`())
+ .verifyComplete()
+ }
+ }
+
+ ".thenEmpty should wait for this to complete and then for the supplied publisher to complete" in {
+ StepVerifier.create(SFlux.just(1, 2, 3).thenEmpty(Mono.empty))
+ .verifyComplete()
+ }
+
+ ".thenMany" - {
+ "should emit the sequence of the supplied publisher" in {
+ StepVerifier.create(SFlux.just(1, 2, 3).thenMany(SFlux.just("1", "2", "3")))
+ .expectNext("1", "2", "3")
+ .verifyComplete()
+ }
+ }
+
+ ".timeout" - {
+ "with timeout duration should throw exception if the item is not emitted within the provided duration after previous emited item" in {
+ StepVerifier.withVirtualTime(() => SFlux.just(1, 2, 3).delayElements(2 seconds).timeout(1 second))
+ .thenAwait(2 seconds)
+ .expectError(classOf[TimeoutException])
+ .verify()
+ }
+ "with timeout and optional fallback should fallback if the item is not emitted within the provided duration" in {
+ StepVerifier.withVirtualTime(() => SFlux.just(1, 2, 3).delayElements(2 seconds).timeout(1 second, Option(SFlux.just(10, 20, 30))))
+ .thenAwait(2 seconds)
+ .expectNext(10, 20, 30)
+ .verifyComplete()
+ }
+ "with firstTimeout should throw exception if the first item is not emitted before the given publisher emits" in {
+ StepVerifier.withVirtualTime(() => SFlux.just(1, 2, 3).delayElements(2 seconds).timeout(SMono.just(1)))
+ .thenAwait(2 seconds)
+ .expectError(classOf[TimeoutException])
+ .verify()
+ }
+ "with firstTimeout and next timeout factory should throw exception if any of the item from this flux does not emit before the timeout provided" in {
+ StepVerifier.withVirtualTime(() => SFlux.just(1, 2, 3).delayElements(2 seconds).timeout(SMono.just(1).delaySubscription(3 seconds), t => SMono.just(1).delaySubscription(t seconds)))
+ .thenAwait(5 seconds)
+ .expectNext(1)
+ .expectError(classOf[TimeoutException])
+ .verify()
+ }
+ "with firstTimeout, nextTimeoutFactory and fallback should fallback if any of the item is not emitted within the timeout period" in {
+ StepVerifier.withVirtualTime(() => SFlux.just(1, 2, 3).delayElements(2 seconds).timeout(SMono.just(1).delaySubscription(3 seconds), t => SMono.just(1).delaySubscription(t seconds), SFlux.just(10, 20, 30)))
+ .thenAwait(5 seconds)
+ .expectNext(1, 10, 20, 30)
+ .verifyComplete()
+ }
+ }
+
+ ".toIterable" - {
+ "should transform this flux into iterable" in {
+ SFlux.just(1, 2, 3).toIterable().toList shouldBe Iterable(1, 2, 3)
+ }
+ "with batchSize should transform this flux into iterable" in {
+ SFlux.just(1, 2, 3).toIterable(1).toList shouldBe Iterable(1, 2, 3)
+ }
+ "with batchSize and queue supplier should transform this flux into interable" in {
+ SFlux.just(1, 2, 3).toIterable(1, Option(Queues.get[Int](1))).toList shouldBe Iterable(1, 2, 3)
+ }
+ }
+
+ ".toStream" - {
+ "should transform this flux into stream" in {
+ SFlux.just(1, 2, 3).toStream() shouldBe Stream(1, 2, 3)
+ }
+ "with batchSize should transform this flux into stream" in {
+ SFlux.just(1, 2, 3).toStream(2) shouldBe Stream(1, 2, 3)
+ }
+ }
+
+ ".transform should defer transformation of this flux to another publisher" in {
+ StepVerifier.create(SFlux.just(1, 2, 3).transform(Mono.from))
+ .expectNext(1)
+ .verifyComplete()
+ }
+
+ ".withLatestFrom should combine with the latest of the other publisher" in {
+ StepVerifier.withVirtualTime(() => SFlux.just(1, 2, 3, 4).delayElements(1 second).withLatestFrom(SFlux.just("one", "two", "three").delayElements(1500 milliseconds), (i: Int, s: String) => (i, s)))
+ .thenAwait(5 seconds)
+ .expectNext((2, "one"), (3, "two"), (4, "two"))
+ .verifyComplete()
+ }
+
+ ".zipWith" - {
+ "should zip both publishers" in {
+ StepVerifier.create(SFlux.just(1, 2, 3).zipWith(SFlux.just(10, 20, 30)))
+ .expectNext((1, 10), (2, 20), (3, 30))
+ .verifyComplete()
+ }
+ "with combinator should zip and apply the combinator" in {
+ StepVerifier.create(SFlux.just(1, 2, 3).zipWithCombinator[Int, Int](SFlux.just(10, 20, 30), (i1: Int, i2: Int) => i1 + i2))
+ .expectNext(11, 22, 33)
+ .verifyComplete()
+ }
+ "with combinator and prefetch should zip and apply the combinator" in {
+ StepVerifier.create(SFlux.just(1, 2, 3).zipWithCombinator[Int, Int](SFlux.just(10, 20, 30), (i1: Int, i2: Int) => i1 + i2, 1))
+ .expectNext(11, 22, 33)
+ .verifyComplete()
+ }
+ "with prefetch should zip both publishers" in {
+ StepVerifier.create(SFlux.just(1, 2, 3).zipWith(SFlux.just(10, 20, 30), 1))
+ .expectNext((1, 10), (2, 20), (3, 30))
+ .verifyComplete()
+ }
+ }
+
+ ".zipWithIterable" - {
+ "should zip with the provided iterable" in {
+ StepVerifier.create(SFlux.just(1, 2, 3).zipWithIterable(Iterable(10, 20, 30)))
+ .expectNext((1, 10), (2, 20), (3, 30))
+ .verifyComplete()
+ }
+ "with zipper should zip and apply the zipper" in {
+ StepVerifier.create(SFlux.just(1, 2, 3).zipWithIterable[Int, Int](Iterable(10, 20, 30), (i1: Int, i2: Int) => i1 + i2))
+ .expectNext(11, 22, 33)
+ .verifyComplete()
+ }
+ }
+
+ ".zipWithTimeSinceSubscribe should emit tuple2 with the second element as the time taken to emit since subscription in milliseconds" in {
+ StepVerifier.withVirtualTime(() => Flux.just(1, 2, 3).delayElements(1 second).zipWithTimeSinceSubscribe())
+ .thenAwait(3 seconds)
+ .expectNext((1, 1000l), (2, 2000l), (3, 3000l))
+ .verifyComplete()
+ }
+
+ ".asJava should convert to java" in {
+ SFlux.just(1, 2, 3).asJava() shouldBe a[reactor.core.publisher.Flux[_]]
+ }
+ }
+}
diff --git a/src/test/scala/reactor/core/scala/publisher/SMonoTest.scala b/src/test/scala/reactor/core/scala/publisher/SMonoTest.scala
new file mode 100644
index 00000000..87d9b534
--- /dev/null
+++ b/src/test/scala/reactor/core/scala/publisher/SMonoTest.scala
@@ -0,0 +1,1144 @@
+package reactor.core.scala.publisher
+
+import java.util.concurrent._
+import java.util.concurrent.atomic.{AtomicBoolean, AtomicInteger, AtomicLong, AtomicReference}
+
+import org.mockito.Mockito.spy
+import org.mockito.{ArgumentMatchers, Mockito}
+import org.reactivestreams.Subscription
+import org.scalatest.{AsyncFreeSpec, FreeSpec, Matchers}
+import reactor.core.Disposable
+import reactor.core.publisher.{BaseSubscriber, Signal, SynchronousSink, Mono => JMono}
+import reactor.core.scala.Scannable
+import reactor.core.scala.publisher.Mono.just
+import reactor.core.scala.publisher.ScalaConverters._
+import reactor.core.scheduler.{Scheduler, Schedulers}
+import reactor.test.{StepVerifier, StepVerifierOptions}
+import reactor.test.scheduler.VirtualTimeScheduler
+import reactor.util.context.Context
+
+import scala.concurrent.Future
+import scala.concurrent.duration._
+import scala.language.postfixOps
+import scala.math.ScalaNumber
+import scala.util.{Failure, Random, Success, Try}
+
+class SMonoTest extends FreeSpec with Matchers with TestSupport {
+ private val randomValue = Random.nextLong()
+
+ "SMono" - {
+ ".create should create a Mono" in {
+ StepVerifier.create(SMono.create[Long](monoSink => monoSink.success(randomValue)))
+ .expectNext(randomValue)
+ .expectComplete()
+ .verify()
+ }
+
+ ".defer should create a Mono with deferred Mono" in {
+ StepVerifier.create(SMono.defer(() => SMono.just(randomValue)))
+ .expectNext(randomValue)
+ .expectComplete()
+ .verify()
+ }
+
+ ".delay should create a Mono with the first element delayed according to the provided" - {
+ "duration" in {
+ StepVerifier.withVirtualTime(() => SMono.delay(5 days))
+ .thenAwait(5 days)
+ .expectNextCount(1)
+ .expectComplete()
+ .verify()
+ }
+ "duration in millis with given TimeScheduler" in {
+ val vts = VirtualTimeScheduler.getOrSet()
+ StepVerifier.create(SMono.delay(50 seconds, vts))
+ .`then`(() => vts.advanceTimeBy(50 seconds))
+ .expectNextCount(1)
+ .expectComplete()
+ .verify()
+
+ }
+ }
+
+ ".empty " - {
+ "without source should create an empty Mono" in {
+ StepVerifier.create(Mono.empty)
+ .verifyComplete()
+ }
+ }
+
+ ".firstEmitter" - {
+ "with varargs should create mono that emit the first item" in {
+ StepVerifier.withVirtualTime(() => SMono.firstEmitter(SMono.just(1).delaySubscription(3 seconds), SMono.just(2).delaySubscription(2 seconds)))
+ .thenAwait(3 seconds)
+ .expectNext(2)
+ .verifyComplete()
+ }
+ }
+
+ ".from" - {
+ "a publisher should ensure that the publisher will emit 0 or 1 item." in {
+ StepVerifier.create(SMono.fromPublisher(SFlux.just(1, 2, 3, 4, 5)))
+ .expectNext(1)
+ .expectComplete()
+ .verify()
+ }
+
+ "a callable should ensure that Mono will return a value from the Callable" in {
+ StepVerifier.create(SMono.fromCallable(() => randomValue))
+ .expectNext(randomValue)
+ .expectComplete()
+ .verify()
+ }
+
+ "source direct should return mono of the source" in {
+ StepVerifier.create(SMono.fromDirect(Flux.just(1, 2, 3)))
+ .expectNext(1, 2, 3)
+ .verifyComplete()
+ }
+
+ "a future should result Mono that will return the value from the future object" in {
+ import scala.concurrent.ExecutionContext.Implicits.global
+ StepVerifier.create(SMono.fromFuture(Future[Long] {
+ randomValue
+ }))
+ .expectNext(randomValue)
+ .verifyComplete()
+ }
+
+ "a Try should result SMono that when it is a" - {
+ "Success will emit the value of the Try" in {
+ def aSuccess = Try(randomValue)
+ StepVerifier.create(SMono.fromTry(aSuccess))
+ .expectNext(randomValue)
+ .verifyComplete()
+ }
+ "Failure will emit onError with the exception" in {
+ def aFailure = Try(throw new RuntimeException("error message"))
+ StepVerifier.create(SMono.fromTry(aFailure))
+ .expectErrorMessage("error message")
+ .verify()
+ }
+ }
+ }
+
+ ".ignoreElements should ignore all elements from a publisher and just react on completion signal" in {
+ StepVerifier.create(SMono.ignoreElements(SMono.just(randomValue)))
+ .expectComplete()
+ .verify()
+ }
+
+ ".just should emit the specified item" in {
+ StepVerifier.create(SMono.just(randomValue))
+ .expectNext(randomValue)
+ .verifyComplete()
+ }
+
+ ".justOrEmpty" - {
+ "with Option should" - {
+ "emit the specified item if the option is not empty" in {
+ StepVerifier.create(SMono.justOrEmpty(Option(randomValue)))
+ .expectNext(randomValue)
+ .verifyComplete()
+ }
+ "just react on completion signal if the option is empty" in {
+ StepVerifier.create(SMono.justOrEmpty(Option.empty))
+ .expectComplete()
+ .verify()
+ }
+ }
+ "with data should" - {
+ "emit the specified item if it is not null" in {
+ val mono = SMono.justOrEmpty(randomValue)
+ StepVerifier.create(mono)
+ .expectNext(randomValue)
+ .verifyComplete()
+ }
+ "just react on completion signal if it is null" in {
+ val nullData: Any = null
+ val mono = SMono.justOrEmpty(nullData)
+ StepVerifier.create(mono)
+ .expectComplete()
+ .verify()
+ }
+ }
+ }
+
+ ".never will never signal any data, error or completion signal" in {
+ StepVerifier.create(SMono.never)
+ .expectSubscription()
+ .expectNoEvent(1 second)
+ .thenCancel()
+ .verify()
+ }
+
+ ".name should give name to this sequence" in {
+ val name = "one two three four"
+ val scannable: Scannable = Scannable.from(Option(SMono.just(randomValue).name(name)))
+ scannable.name shouldBe name
+ }
+
+ ".sequenceEqual should" - {
+ "emit Boolean.TRUE when both publisher emit the same value" in {
+ StepVerifier.create(SMono.sequenceEqual(just(1), just(1)))
+ .expectNext(true)
+ .verifyComplete()
+ }
+ "emit true when both publisher emit the same value according to the isEqual function" in {
+ val mono = SMono.sequenceEqual[Int](just(10), just(100), (t1: Int, t2: Int) => t1 % 10 == t2 % 10)
+ StepVerifier.create(mono)
+ .expectNext(true)
+ .verifyComplete()
+ }
+ "emit true when both publisher emit the same value according to the isEqual function with bufferSize" in {
+ val mono = SMono.sequenceEqual[Int](just(10), just(100), (t1: Int, t2: Int) => t1 % 10 == t2 % 10, 2)
+ StepVerifier.create(mono)
+ .expectNext(true)
+ .verifyComplete()
+
+ }
+ }
+
+ ".raiseError should create Mono that emit error" in {
+ StepVerifier.create(SMono.raiseError(new RuntimeException("runtime error")))
+ .expectError(classOf[RuntimeException])
+ .verify()
+ }
+
+ ".when" - {
+ "with iterable" - {
+ "of publisher of unit should return when all of the sources has fulfilled" in {
+ val completed = new ConcurrentHashMap[String, Boolean]()
+ val mono = SMono.when(Iterable(
+ SMono.just[Unit]({
+ completed.put("first", true)
+ }),
+ SMono.just[Unit]({
+ completed.put("second", true)
+ })
+ ))
+ StepVerifier.create(mono)
+ .expectComplete()
+ .verify()
+ completed should contain key "first"
+ completed should contain key "second"
+ }
+ }
+
+ "with varargs of publisher should return when all of the resources has fulfilled" in {
+ val completed = new ConcurrentHashMap[String, Boolean]()
+ val sources = Seq(just[Unit]({
+ completed.put("first", true)
+ }),
+ just[Unit]({
+ completed.put("second", true)
+ })
+ )
+ StepVerifier.create(SMono.when(sources.toArray: _*))
+ .expectComplete()
+ .verify()
+ completed should contain key "first"
+ completed should contain key "second"
+ }
+ }
+
+ ".zipDelayError" - {
+ "with p1 and p2 should merge when both Monos are fulfilled" in {
+ StepVerifier.create(SMono.zipDelayError(SMono.just(1), SMono.just("one")))
+ .expectNext((1, "one"))
+ .verifyComplete()
+ }
+
+ "with p1, p2 and p3 should merge when all Monos are fulfilled" in {
+ StepVerifier.create(SMono.zipDelayError(SMono.just(1), SMono.just("one"), SMono.just(1L)))
+ .expectNext((1, "one", 1L))
+ .verifyComplete()
+ }
+
+ "with p1, p2, p3 and p4 should merge when all Monos are fulfilled" in {
+ StepVerifier.create(SMono.zipDelayError(SMono.just(1), SMono.just(2), SMono.just(3), SMono.just(4)))
+ .expectNext((1, 2, 3, 4))
+ .verifyComplete()
+ }
+
+ "with p1, p2, p3, p4 and p5 should merge when all Monos are fulfilled" in {
+ StepVerifier.create(SMono.zipDelayError(SMono.just(1), SMono.just(2), SMono.just(3), SMono.just(4), SMono.just(5)))
+ .expectNext((1, 2, 3, 4, 5))
+ .verifyComplete()
+ }
+
+ "with p1, p2, p3, p4, p5 and p6 should merge when all Monos are fulfilled" in {
+ StepVerifier.create(SMono.zipDelayError(SMono.just(1), SMono.just(2), SMono.just(3), SMono.just(4), SMono.just(5), SMono.just(6)))
+ .expectNext((1, 2, 3, 4, 5, 6))
+ .verifyComplete()
+ }
+
+ "with iterable" - {
+ "of publisher of unit should return when all of the sources has fulfilled" in {
+ val completed = new ConcurrentHashMap[String, Boolean]()
+ val mono = SMono.whenDelayError(Iterable(
+ SMono.just[Unit]({
+ completed.put("first", true)
+ }),
+ SMono.just[Unit]({
+ completed.put("second", true)
+ })
+ ))
+ StepVerifier.create(mono)
+ .expectComplete()
+ .verify()
+ completed should contain key "first"
+ completed should contain key "second"
+ }
+
+ "of combinator function and monos should emit the value after combined by combinator function" in {
+ StepVerifier.create(SMono.zipDelayError((values: Array[Any]) => s"${values(0).toString}-${values(1).toString}", SMono.just(1), SMono.just("one")))
+ .expectNext("1-one")
+ .verifyComplete()
+ }
+ }
+ }
+
+ ".zip" - {
+ val combinator: Array[AnyRef] => String = { datas => datas.map(_.toString).foldLeft("") { (acc, v) => if (acc.isEmpty) v else s"$acc-$v" } }
+ "with combinator function and varargs of mono should fullfill when all Monos are fulfilled" in {
+ val mono = SMono.zip(combinator, SMono.just(1), SMono.just(2))
+ StepVerifier.create(mono)
+ .expectNext("1-2")
+ .verifyComplete()
+ }
+ "with combinator function and Iterable of mono should fulfill when all Monos are fulfilled" in {
+ val mono = SMono.zip(Iterable(SMono.just(1), SMono.just("2")), combinator)
+ StepVerifier.create(mono)
+ .expectNext("1-2")
+ .verifyComplete()
+ }
+ }
+
+ ".and" - {
+ "should combine this mono and the other" in {
+ StepVerifier.create(SMono.just(1) and SMono.just(2))
+ .verifyComplete()
+ }
+ }
+
+ ".as should transform the Mono to whatever the transformer function is provided" in {
+ val mono = SMono.just(randomValue)
+
+ StepVerifier.create(mono.as(m => SFlux.fromPublisher(m)))
+ .expectNext(randomValue)
+ .verifyComplete()
+ }
+
+ ".asJava should convert to java" in {
+ SMono.just(randomValue).asJava() shouldBe a[JMono[_]]
+ }
+
+ ".asScala should transform Mono to SMono" in {
+ JMono.just(randomValue).asScala shouldBe an[SMono[_]]
+ }
+
+ ".block" - {
+ "should block the mono to get the value" in {
+ SMono.just(randomValue).block() shouldBe randomValue
+ }
+ "with duration should block the mono up to the duration" in {
+ SMono.just(randomValue).block(10 seconds) shouldBe randomValue
+ }
+ }
+
+ ".blockOption" - {
+ "without duration" - {
+ "should block the mono to get value" in {
+ SMono.just(randomValue).blockOption() shouldBe Some(randomValue)
+ }
+ "should return None if mono is empty" in {
+ SMono.empty.blockOption() shouldBe None
+ }
+ }
+ "with duration" - {
+ "should block the mono up to the duration" in {
+ SMono.just(randomValue).blockOption(10 seconds) shouldBe Some(randomValue)
+ }
+ "shouldBlock the mono up to the duration and return None" in {
+ StepVerifier.withVirtualTime(() => SMono.just(SMono.empty.blockOption(10 seconds)))
+ .thenAwait(10 seconds)
+ .expectNext(None)
+ .verifyComplete()
+ }
+ }
+ }
+
+ ".cast should cast the underlying value" in {
+ val number = SMono.just(BigDecimal("123")).cast(classOf[ScalaNumber]).block()
+ number shouldBe a[ScalaNumber]
+ }
+
+ ".cache" - {
+ "should cache the value" in {
+ val queue = new ArrayBlockingQueue[Int](1)
+ queue.put(1)
+ val mono = SMono.create[Int](sink => {
+ sink.success(queue.poll())
+ }).cache()
+ StepVerifier.create(mono)
+ .expectNext(1)
+ .verifyComplete()
+ StepVerifier.create(mono)
+ .expectNext(1)
+ .verifyComplete()
+ }
+ "with ttl cache the value up to specific time" in {
+ import reactor.test.scheduler.VirtualTimeScheduler
+ val timeScheduler = VirtualTimeScheduler.getOrSet
+ val queue = new ArrayBlockingQueue[Int](1)
+ queue.put(1)
+ val mono = SMono.create[Int](sink => {
+ sink.success(queue.poll())
+ }).cache(1 minute)
+ StepVerifier.create(mono)
+ .expectNext(1)
+ .verifyComplete()
+ timeScheduler.advanceTimeBy(59 seconds)
+ StepVerifier.create(mono)
+ .expectNext(1)
+ .verifyComplete()
+ timeScheduler.advanceTimeBy(2 minutes)
+ StepVerifier.create(mono)
+ .verifyComplete()
+ }
+ }
+
+ ".cancelOn should cancel the subscriber on a particular scheduler" in {
+ val jMono = spy(JMono.just(1))
+ new ReactiveSMono[Int](jMono).cancelOn(Schedulers.immediate())
+ Mockito.verify(jMono).cancelOn(ArgumentMatchers.any[Scheduler]())
+ }
+
+ ".compose should defer creating the target mono type" in {
+ StepVerifier.create(SMono.just(1).compose[String](m => SFlux.fromPublisher(m.map(_.toString))))
+ .expectNext("1")
+ .verifyComplete()
+ }
+
+ ".concatWith should concatenate mono with another source" in {
+ StepVerifier.create(SMono.just(1).concatWith(SMono.just(2)))
+ .expectNext(1)
+ .expectNext(2)
+ .verifyComplete()
+ }
+
+ "++ should concatenate mono with another source" in {
+ StepVerifier.create(SMono.just(1) ++ SMono.just(2))
+ .expectNext(1)
+ .expectNext(2)
+ .verifyComplete()
+ }
+
+ ".defaultIfEmpty should use the provided default value if the mono is empty" in {
+ StepVerifier.create(SMono.empty[Int].defaultIfEmpty(-1))
+ .expectNext(-1)
+ .verifyComplete()
+ }
+
+ ".delayElement" - {
+ "should delay the element" in {
+ StepVerifier.withVirtualTime(() => SMono.just(randomValue).delayElement(5 seconds))
+ .thenAwait(5 seconds)
+ .expectNext(randomValue)
+ .verifyComplete()
+ }
+ "with timer should delay using timer" in {
+ StepVerifier.withVirtualTime(() => SMono.just(randomValue).delayElement(5 seconds, Schedulers.immediate()))
+ .thenAwait(5 seconds)
+ .expectNext(randomValue)
+ .verifyComplete()
+ }
+ }
+
+ ".delaySubscription" - {
+ "with delay duration should delay subscription as long as the provided duration" in {
+ StepVerifier.withVirtualTime(() => SMono.just(1).delaySubscription(1 hour))
+ .thenAwait(1 hour)
+ .expectNext(1)
+ .verifyComplete()
+ }
+ "with delay duration and scheduler should delay subscription as long as the provided duration" in {
+ StepVerifier.withVirtualTime(() => SMono.just(1).delaySubscription(1 hour, Schedulers.single()))
+ .thenAwait(1 hour)
+ .expectNext(1)
+ .verifyComplete()
+ }
+ "with another publisher should delay the current subscription until the other publisher completes" in {
+ StepVerifier.withVirtualTime(() => SMono.just(1).delaySubscription(SMono.just("one").delaySubscription(1 hour)))
+ .thenAwait(1 hour)
+ .expectNext(1)
+ .verifyComplete()
+
+ }
+ }
+
+ ".delayUntil should delay until the other provider terminate" in {
+ StepVerifier.withVirtualTime(() => SMono.just(randomValue).delayUntil(_ => SFlux.just(1, 2).delayElements(2 seconds)))
+ .thenAwait(4 seconds)
+ .expectNext(randomValue)
+ .verifyComplete()
+ }
+
+ ".dematerialize should dematerialize the underlying mono" in {
+ StepVerifier.create(SMono.just(Signal.next(randomValue)).dematerialize())
+ .expectNext(randomValue)
+ .verifyComplete()
+ }
+
+ ".doAfterSuccessOrError should call the callback function after the mono is terminated" in {
+ val atomicBoolean = new AtomicBoolean(false)
+ StepVerifier.create(SMono.just(randomValue)
+ .doAfterSuccessOrError { t =>
+ atomicBoolean.compareAndSet(false, true) shouldBe true
+ t shouldBe Success(randomValue)
+ })
+ .expectNext(randomValue)
+ .verifyComplete()
+ atomicBoolean shouldBe 'get
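+ // on the error path the callback receives the Failure and flips the flag back to false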
+ val exception = new RuntimeException
+ StepVerifier.create(SMono.raiseError[Long](exception)
+ .doAfterSuccessOrError { t =>
+ atomicBoolean.compareAndSet(true, false) shouldBe true
+ t shouldBe Failure(exception)
+ })
+ .expectError()
+ .verify()
+ atomicBoolean.get() shouldBe false
+ }
+
+ ".doAfterTerminate should call the callback function after the mono is terminated" in {
+ val atomicBoolean = new AtomicBoolean(false)
+ StepVerifier.create(SMono.just(randomValue).doAfterTerminate(() => atomicBoolean.compareAndSet(false, true)))
+ .expectNext(randomValue)
+ .verifyComplete()
+ atomicBoolean shouldBe 'get
+ }
+
+ ".doFinally should call the callback" in {
+ val atomicBoolean = new AtomicBoolean(false)
+ StepVerifier.create(SMono.just(randomValue)
+ .doFinally(_ => atomicBoolean.compareAndSet(false, true) shouldBe true))
+ .expectNext(randomValue)
+ .verifyComplete()
+ atomicBoolean shouldBe 'get
+ }
+
+ ".doOnCancel should call the callback function when the subscription is cancelled" in {
+ val atomicBoolean = new AtomicBoolean(false)
+ val mono = SMono.delay(1 minute)
+ .doOnCancel(() => {
+ atomicBoolean.compareAndSet(false, true) shouldBe true
+ })
+
+ val subscriptionReference = new AtomicReference[Subscription]()
+ mono.subscribe(new BaseSubscriber[Long] {
+ override def hookOnSubscribe(subscription: Subscription): Unit = {
+ subscriptionReference.set(subscription)
+ subscription.request(1)
+ }
+
+ override def hookOnNext(value: Long): Unit = ()
+ })
+ subscriptionReference.get().cancel()
+ atomicBoolean shouldBe 'get
+ }
+
+ ".doOnNext should call the callback function when the mono emit data successfully" in {
+ val atomicLong = new AtomicLong()
+ StepVerifier.create(SMono.just(randomValue)
+ .doOnNext(t => atomicLong.compareAndSet(0, t)))
+ .expectNext(randomValue)
+ .verifyComplete()
+ atomicLong.get() shouldBe randomValue
+ }
+
+ ".doOnSuccess should call the callback function when the mono completes successfully" in {
+ val atomicBoolean = new AtomicBoolean(false)
+ StepVerifier.create(SMono.empty[Int]
+ .doOnSuccess(_ => atomicBoolean.compareAndSet(false, true) shouldBe true))
+ .verifyComplete()
+ atomicBoolean shouldBe 'get
+ }
+
+ ".doOnError" - {
+ "with callback function should call the callback function when the mono encounter error" in {
+ val atomicBoolean = new AtomicBoolean(false)
+ StepVerifier.create(SMono.raiseError(new RuntimeException())
+ .doOnError(_ => atomicBoolean.compareAndSet(false, true) shouldBe true))
+ .expectError(classOf[RuntimeException])
+ .verify()
+ atomicBoolean shouldBe 'get
+ }
+ }
+
+ ".doOnRequest should call the callback function when subscriber request data" in {
+ val atomicLong = new AtomicLong(0)
+ SMono.just(randomValue)
+ .doOnRequest(l => atomicLong.compareAndSet(0, l)).subscribe(new BaseSubscriber[Long] {
+ override def hookOnSubscribe(subscription: Subscription): Unit = {
+ subscription.request(1)
+ ()
+ }
+
+ override def hookOnNext(value: Long): Unit = ()
+ })
+ atomicLong.get() shouldBe 1
+ }
+
+ ".doOnSubscribe should call the callback function when the mono is subscribed" in {
+ val atomicBoolean = new AtomicBoolean(false)
+ StepVerifier.create(SMono.just(randomValue)
+ .doOnSubscribe(_ => atomicBoolean.compareAndSet(false, true)))
+ .expectNextCount(1)
+ .verifyComplete()
+ atomicBoolean shouldBe 'get
+ }
+
+ ".doOnTerminate should do something on terminate" in {
+ val atomicLong = new AtomicLong()
+ StepVerifier.create(SMono.just(randomValue).doOnTerminate { () => atomicLong.set(randomValue) })
+ .expectNext(randomValue)
+ .expectComplete()
+ .verify()
+ atomicLong.get() shouldBe randomValue
+ }
+
+ ".elapsed" - {
+ "should provide the time elapse when this mono emit value" in {
+ StepVerifier.withVirtualTime(() => SMono.just(randomValue).delaySubscription(1 second).elapsed(), 1)
+ .thenAwait(1 second)
+ .expectNextMatches((t: (Long, Long)) => t match {
+ case (time, data) => time >= 1000 && data == randomValue
+ })
+ .verifyComplete()
+ }
+ "with TimedScheduler should provide the time elapsed using the provided scheduler when this mono emit value" in {
+ val virtualTimeScheduler = VirtualTimeScheduler.getOrSet()
+ StepVerifier.create(SMono.just(randomValue)
+ .delaySubscription(1 second, virtualTimeScheduler)
+ .elapsed(virtualTimeScheduler), 1)
+ .`then`(() => virtualTimeScheduler.advanceTimeBy(1 second))
+ .expectNextMatches((t: (Long, Long)) => t match {
+ case (time, data) => time >= 1000 && data == randomValue
+ })
+ .verifyComplete()
+ }
+ }
+
+ ".expandDeep" - {
+ "should expand the mono" in {
+ StepVerifier.create(SMono.just("a").expandDeep(s => SMono.just(s"$s$s")).take(3))
+ .expectNext("a", "aa", "aaaa")
+ .verifyComplete()
+ }
+ "with capacity hint should expand the mono" in {
+ StepVerifier.create(SMono.just("a").expandDeep(s => SMono.just(s"$s$s"), 10).take(3))
+ .expectNext("a", "aa", "aaaa")
+ .verifyComplete()
+ }
+ }
+
+ ".expand" - {
+ "should expand the mono" in {
+ StepVerifier.create(SMono.just("a").expand(s => SMono.just(s"$s$s")).take(3))
+ .expectNext("a", "aa", "aaaa")
+ .verifyComplete()
+ }
+ "with capacity hint should expand the mono" in {
+ StepVerifier.create(SMono.just("a").expand(s => SMono.just(s"$s$s"), 10).take(3))
+ .expectNext("a", "aa", "aaaa")
+ .verifyComplete()
+ }
+ }
+
+ ".filter should filter the value of mono where it pass the provided predicate" in {
+ StepVerifier.create(SMono.just(10)
+ .filter(i => i < 10))
+ .verifyComplete()
+ }
+
+ ".filterWhen should replay the value of mono if the first item emitted by the test is true" in {
+ StepVerifier.create(SMono.just(10).filterWhen((i: Int) => SMono.just(i % 2 == 0)))
+ .expectNext(10)
+ .verifyComplete()
+ }
+
+ ".flatMap should flatmap the provided mono" in {
+ StepVerifier.create(SMono.just(randomValue).flatMap(l => SMono.just(l.toString)))
+ .expectNext(randomValue.toString)
+ .verifyComplete()
+ }
+
+ ".flatMapMany" - {
+ "with a single mapper should flatmap the value mapped by the provided mapper" in {
+ StepVerifier.create(SMono.just(1).flatMapMany(i => SFlux.just(i, i * 2)))
+ .expectNext(1, 2)
+ .verifyComplete()
+ }
+ "with mapperOnNext, mapperOnError and mapperOnComplete should mapped each individual event into values emitted by flux" in {
+ StepVerifier.create(SMono.just(1)
+ .flatMapMany(
+ _ => SMono.just("one"),
+ _ => SMono.just("error"),
+ () => SMono.just("complete")
+ ))
+ .expectNext("one", "complete")
+ .verifyComplete()
+ }
+ }
+
+ ".flatMapIterable should flatmap the value mapped by the provided mapper" in {
+ StepVerifier.create(SMono.just("one").flatMapIterable(str => str.toCharArray))
+ .expectNext('o', 'n', 'e')
+ .verifyComplete()
+ }
+
+ ".flux should convert this mono into a flux" in {
+ val flux = SMono.just(randomValue).flux()
+ StepVerifier.create(flux)
+ .expectNext(randomValue)
+ .verifyComplete()
+ flux shouldBe an[SFlux[Long]]
+ }
+
+ ".hasElement should convert to another Mono that emit" - {
+ "true if it has element" in {
+ StepVerifier.create(SMono.just(1).hasElement)
+ .expectNext(true)
+ .verifyComplete()
+ }
+ "false if it is empty" in {
+ StepVerifier.create(SMono.empty.hasElement)
+ .expectNext(false)
+ .verifyComplete()
+ }
+ }
+
+ ".handle should handle onNext, onError and onComplete" in {
+ StepVerifier.create(SMono.just(randomValue)
+ .handle((_: Long, s: SynchronousSink[String]) => {
+ s.next("One")
+ s.complete()
+ }))
+ .expectNext("One")
+ .verifyComplete()
+ }
+
+ ".ignoreElement should only emit termination event" in {
+ StepVerifier.create(SMono.just(randomValue).ignoreElement)
+ .verifyComplete()
+ }
+
+ ".map should map the type of Mono from T to R" in {
+ StepVerifier.create(SMono.just(randomValue).map(_.toString))
+ .expectNext(randomValue.toString)
+ .expectComplete()
+ .verify()
+ }
+
+ ".mapError" - {
+ class MyCustomException(val message: String) extends Exception(message)
+ "with mapper should map the error to another error" in {
+ StepVerifier.create(SMono.raiseError[Int](new RuntimeException("runtimeException"))
+ .onErrorMap { case t: Throwable => new MyCustomException(t.getMessage) })
+ .expectErrorMatches((t: Throwable) => {
+ t.getMessage shouldBe "runtimeException"
+ t should not be a[RuntimeException]
+ t shouldBe a[MyCustomException]
+ true
+ })
+ .verify()
+ }
+ "with an error type and mapper should" - {
+ "map the error to another type if the exception is according to the provided type" in {
+ StepVerifier.create(SMono.raiseError[Int](new RuntimeException("runtimeException"))
+ .onErrorMap { case t: RuntimeException => new MyCustomException(t.getMessage) })
+ .expectErrorMatches((t: Throwable) => {
+ t.getMessage shouldBe "runtimeException"
+ t should not be a[RuntimeException]
+ t shouldBe a[MyCustomException]
+ true
+ })
+ .verify()
+ }
+ "not map the error if the exception is not the type of provided exception class" in {
+ StepVerifier.create(SMono.raiseError[Int](new Exception("runtimeException"))
+ .onErrorMap {
+ case t: RuntimeException => new MyCustomException(t.getMessage)
+ })
+ .expectErrorMatches((t: Throwable) => {
+ t.getMessage shouldBe "runtimeException"
+ t should not be a[MyCustomException]
+ t shouldBe a[Exception]
+ true
+ })
+ .verify()
+ }
+ }
+ "with a predicate and mapper should" - {
+ "map the error to another type if the predicate returns true" in {
+ StepVerifier.create(SMono.raiseError[Int](new RuntimeException("should map"))
+ .onErrorMap { case t: Throwable if t.getMessage == "should map" => new MyCustomException(t.getMessage) })
+ .expectError(classOf[MyCustomException])
+ .verify()
+ }
+ "not map the error to another type if the predicate returns false" in {
+ StepVerifier.create(SMono.raiseError[Int](new RuntimeException("should not map"))
+ .onErrorMap { case t: Throwable if t.getMessage == "should map" => new MyCustomException(t.getMessage) })
+ .expectError(classOf[RuntimeException])
+ .verify()
+ }
+ }
+ }
+
+ ".materialize should convert the mono into a mono that emit its signal" in {
+ StepVerifier.create(SMono.just(randomValue).materialize())
+ .expectNext(Signal.next(randomValue))
+ .verifyComplete()
+ }
+
+ ".mergeWith should convert this mono to flux with value emitted from this mono followed by the other" in {
+ StepVerifier.create(SMono.just(1).mergeWith(SMono.just(2)))
+ .expectNext(1, 2)
+ .verifyComplete()
+ }
+
+ ".ofType should" - {
+ "convert the Mono value type to the provided type if it can be casted" in {
+ StepVerifier.create(SMono.just(BigDecimal("1")).ofType(classOf[ScalaNumber]))
+ .expectNextCount(1)
+ .verifyComplete()
+ }
+ "ignore the Mono value if it can't be casted" in {
+ StepVerifier.create(SMono.just(1).ofType(classOf[String]))
+ .verifyComplete()
+ }
+ }
+
+ ".onErrorRecover" - {
+ "should recover with a Mono of element that has been recovered" in {
+ StepVerifier.create(SMono.raiseError(new RuntimeException("oops"))
+ .onErrorRecover { case _ => Truck(5) })
+ .expectNext(Truck(5))
+ .verifyComplete()
+ }
+ }
+
+ ".onErrorResume" - {
+ "will fallback to the provided value when error happens" in {
+ StepVerifier.create(SMono.raiseError(new RuntimeException()).onErrorResume(_ => SMono.just(-1)))
+ .expectNext(-1)
+ .verifyComplete()
+ }
+ "with class type and fallback function will fallback to the provided value when the exception is of provided type" in {
+ StepVerifier.create(SMono.raiseError(new RuntimeException()).onErrorResume {
+ case _: Exception => SMono.just(-1)
+ })
+ .expectNext(-1)
+ .verifyComplete()
+
+ StepVerifier.create(SMono.raiseError(new Exception()).onErrorResume {
+ case _: RuntimeException => SMono.just(-1)
+ })
+ .expectError(classOf[Exception])
+ .verify()
+ }
+ "with predicate and fallback function will fallback to the provided value when the predicate returns true" in {
+ StepVerifier.create(SMono.raiseError(new RuntimeException("fallback")).onErrorResume {
+ case t if t.getMessage == "fallback" => SMono.just(-1)
+ })
+ .expectNext(-1)
+ .verifyComplete()
+ }
+ }
+
+ ".or should return Mono that emit the value between the two Monos that is emited first" in {
+ StepVerifier.create(SMono.delay(5 seconds).or(SMono.just(2)))
+ .expectNext(2)
+ .verifyComplete()
+ }
+
+ ".publish should share and may transform it and consume it as many times as necessary without causing" +
+ "multiple subscription" in {
+ val mono = SMono.just(randomValue).publish[String](ml => ml.map(l => l.toString))
+
+ val counter = new AtomicLong()
+
+ val subscriber = new BaseSubscriber[String] {
+ override def hookOnSubscribe(subscription: Subscription): Unit = {
+ subscription.request(1)
+ counter.incrementAndGet()
+ }
+
+ override def hookOnNext(value: String): Unit = ()
+ }
+ mono.subscribe(subscriber)
+ mono.subscribe(subscriber)
+ counter.get() shouldBe 1
+ }
+
+ ".repeat" - {
+ "should return flux that repeat the value from this mono" in {
+ StepVerifier.create(SMono.just(randomValue).repeat().take(3))
+ .expectNext(randomValue, randomValue, randomValue)
+ .verifyComplete()
+ }
+ "with boolean predicate should repeat the value from this mono as long as the predicate returns true" in {
+ val counter = new AtomicLong()
+ val flux = SMono.just(randomValue)
+ .repeat(predicate = () => counter.get() < 3)
+ val buffer = new LinkedBlockingQueue[Long]()
+ val latch = new CountDownLatch(1)
+ flux.subscribe(new BaseSubscriber[Long] {
+ override def hookOnSubscribe(subscription: Subscription): Unit = subscription.request(Long.MaxValue)
+
+ override def hookOnNext(value: Long): Unit = {
+ counter.incrementAndGet()
+ buffer.put(value)
+ }
+
+ override def hookOnComplete(): Unit = latch.countDown()
+ })
+ if (latch.await(1, TimeUnit.SECONDS))
+ buffer should have size 3
+ else
+ fail("no completion signal is detected")
+
+ }
+ "with number of repeat should repeat value from this value as many as the provided parameter" in {
+ StepVerifier.create(SMono.just(randomValue).repeat(5))
+ // this is a bug in https://github.com/reactor/reactor-core/issues/1252. It should only be 5 in total
+ .expectNext(randomValue, randomValue, randomValue, randomValue, randomValue, randomValue)
+ .verifyComplete()
+ }
+ "with number of repeat and predicate should repeat value from this value as many as provided parameter and as" +
+ "long as the predicate returns true" in {
+ val counter = new AtomicLong()
+ val flux = SMono.just(randomValue).repeat(5, () => counter.get() < 3)
+ val buffer = new LinkedBlockingQueue[Long]()
+ val latch = new CountDownLatch(1)
+ flux.subscribe(new BaseSubscriber[Long] {
+ override def hookOnSubscribe(subscription: Subscription): Unit = subscription.request(Long.MaxValue)
+
+ override def hookOnNext(value: Long): Unit = {
+ counter.incrementAndGet()
+ buffer.put(value)
+ }
+
+ override def hookOnComplete(): Unit = latch.countDown()
+ })
+ if (latch.await(1, TimeUnit.SECONDS))
+ buffer should have size 3
+ else
+ fail("no completion signal is detected")
+ }
+ }
+
+ ".repeatWhen should emit the value of this mono accompanied by the publisher" in {
+ StepVerifier.create(SMono.just(randomValue).repeatWhen((_: SFlux[Long]) => SFlux.just[Long](10, 20)))
+ .expectNext(randomValue, randomValue, randomValue)
+ .verifyComplete()
+ }
+
+ ".repeatWhenEmpty should emit resubscribe to this mono when the companion is empty" in {
+ val counter = new AtomicInteger(0)
+ StepVerifier.create(SMono.empty.doOnSubscribe(_ => counter.incrementAndGet()).repeatWhenEmpty((_: SFlux[Long]) => SFlux.just(-1, -2, -3)))
+ .verifyComplete()
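+ // one initial subscription plus one resubscription per value emitted by the companion flux (3) = 4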
+ counter.get() shouldBe 4
+ }
+
+ ".single" - {
+ "should enforce the existence of element" in {
+ StepVerifier.create(SMono.just(randomValue).single())
+ .expectNext(randomValue)
+ .verifyComplete()
+ }
+ "should throw exception if it is empty" in {
+ StepVerifier.create(SMono.empty.single())
+ .expectError(classOf[NoSuchElementException])
+ .verify()
+ }
+ }
+
+ ".subscribe" - {
+ "without parameter should return Disposable" in {
+ val x = SMono.just(randomValue).subscribe()
+ x shouldBe a[Disposable]
+ }
+ "with consumer should invoke the consumer" in {
+ val counter = new CountDownLatch(1)
+ val disposable = SMono.just(randomValue).subscribe(_ => counter.countDown())
+ disposable shouldBe a[Disposable]
+ counter.await(1, TimeUnit.SECONDS) shouldBe true
+ }
+ "with consumer and error consumer should invoke the error consumer when error happen" in {
+ val counter = new CountDownLatch(1)
+ val disposable = SMono.raiseError[Any](new RuntimeException()).subscribe(_ => (), _ => counter.countDown())
+ disposable shouldBe a[Disposable]
+ counter.await(1, TimeUnit.SECONDS) shouldBe true
+ }
+ "with consumer, error consumer and completeConsumer should invoke the completeConsumer when it's complete" in {
+ val counter = new CountDownLatch(2)
+ val disposable = SMono.just(randomValue).subscribe(_ => counter.countDown(), _ => (), counter.countDown())
+ disposable shouldBe a[Disposable]
+ counter.await(1, TimeUnit.SECONDS) shouldBe true
+ }
+ "with consumer, error consumer, completeConsumer and subscriptionConsumer should invoke the subscriptionConsumer when there is subscription" in {
+ val counter = new CountDownLatch(3)
+ val disposable = SMono.just(randomValue).subscribe(_ => counter.countDown(), _ => (), counter.countDown(), s => {
+ s.request(1)
+ counter.countDown()
+ })
+ disposable shouldBe a[Disposable]
+ counter.await(1, TimeUnit.SECONDS) shouldBe true
+ }
+ }
+
+ ".subscribeContext should pass context properly" in {
+ val key = "message"
+ val r: SMono[String] = SMono.just("Hello")
+ .flatMap(s => SMono.subscribeContext()
+ .map(ctx => s"$s ${ctx.get(key)}"))
+ .subscriberContext(ctx => ctx.put(key, "World"))
+
+ StepVerifier.create(r)
+ .expectNext("Hello World")
+ .verifyComplete()
+
+ StepVerifier.create(SMono.just(1).map(i => i + 10),
+ StepVerifierOptions.create().withInitialContext(Context.of("foo", "bar")))
+ .expectAccessibleContext()
+ .contains("foo", "bar")
+ .`then`()
+ .expectNext(11)
+ .verifyComplete()
+ }
+
+ ".switchIfEmpty with alternative will emit the value from alternative Mono when this mono is empty" in {
+ StepVerifier.create(SMono.empty.switchIfEmpty(SMono.just(-1)))
+ .expectNext(-1)
+ .verifyComplete()
+ }
+
+ ".tag should tag the Mono and accessible from Scannable" in {
+ val mono = SMono.just(randomValue).tag("integer", "one, two, three")
+ Scannable.from(Option(mono)).tags shouldBe Stream("integer" -> "one, two, three")
+ }
+
+ ".take" - {
+ "should complete after duration elapse" in {
+ StepVerifier.withVirtualTime(() => SMono.delay(10 seconds).take(5 seconds))
+ .thenAwait(5 seconds)
+ .verifyComplete()
+ }
+ "with duration and scheduler should complete after duration elapse" in {
+ StepVerifier.withVirtualTime(() => SMono.delay(10 seconds).take(5 seconds, Schedulers.parallel()))
+ .thenAwait(5 seconds)
+ .verifyComplete()
+ }
+ }
+
+ ".takeUntilOther should complete if the companion publisher emit any signal first" in {
+ StepVerifier.withVirtualTime(() => SMono.delay(10 seconds).takeUntilOther(SMono.just("a")))
+ .verifyComplete()
+ }
+
+ ".then" - {
+ "without parameter should only replays complete and error signals from this mono" in {
+ StepVerifier.create(SMono.just(randomValue).`then`())
+ .verifyComplete()
+ }
+ "with other mono should ignore element from this mono and transform its completion signal into emission and " +
+ "completion signal of the provided mono" in {
+ StepVerifier.create(SMono.just(randomValue).`then`(SMono.just("1")))
+ .expectNext("1")
+ .verifyComplete()
+ }
+ }
+
+ ".thenEmpty should complete this mono then for a supplied publisher to also complete" in {
+ val latch = new CountDownLatch(1)
+ val mono = SMono.just(randomValue)
+ .doOnSuccess(_ => latch.countDown())
+ .thenEmpty(SMono.empty)
+ StepVerifier.create(mono)
+ .verifyComplete()
+ latch.await(1, TimeUnit.SECONDS) shouldBe true
+ }
+
+ ".thenMany should ignore the element from this mono and transform the completion signal into a Flux that will emit " +
+ "from the provided publisher when the publisher is provided " - {
+ "directly" in {
+ StepVerifier.create(SMono.just(randomValue).thenMany(SFlux.just(1, 2, 3)))
+ .expectNext(1, 2, 3)
+ .verifyComplete()
+ }
+ }
+
+ ".timeout" - {
+ "should raise TimeoutException after duration elapse" in {
+ StepVerifier.withVirtualTime(() => SMono.delay(10 seconds).timeout(5 seconds))
+ .thenAwait(5 seconds)
+ .expectError(classOf[TimeoutException])
+ .verify()
+ }
+ "should fallback to the provided mono if the value doesn't arrive in given duration" in {
+ StepVerifier.withVirtualTime(() => SMono.delay(10 seconds).timeout(5 seconds, Option(SMono.just(1L))))
+ .thenAwait(5 seconds)
+ .expectNext(1)
+ .verifyComplete()
+ }
+ "with timeout and timer should signal TimeoutException if the item does not arrive before a given period" in {
+ val timer = VirtualTimeScheduler.getOrSet()
+ StepVerifier.withVirtualTime(() => SMono.delay(10 seconds).timeout(5 seconds, timer = timer), () => timer, 1)
+ .thenAwait(5 seconds)
+ .expectError(classOf[TimeoutException])
+ .verify()
+ }
+ "should raise TimeoutException if this mono has not emit value when the provided publisher has emit value" in {
+ val mono = SMono.delay(10 seconds).timeoutWhen(SMono.just("whatever"))
+ StepVerifier.create(mono)
+ .expectError(classOf[TimeoutException])
+ .verify()
+ }
+ "should fallback to the provided fallback mono if this mono does not emit value when the provided publisher emits value" in {
+ val mono = SMono.delay(10 seconds).timeoutWhen(SMono.just("whatever"), Option(SMono.just(-1L)))
+ StepVerifier.create(mono)
+ .expectNext(-1)
+ .verifyComplete()
+ }
+ "with timeout, fallback and timer should fallback to the given mono if the item does not arrive before a given period" in {
+ val timer = VirtualTimeScheduler.getOrSet()
+ StepVerifier.create(SMono.delay(10 seconds, timer)
+ .timeout(5 seconds, Option(SMono.just(-1)), timer), 1)
+ .`then`(() => timer.advanceTimeBy(5 seconds))
+ .expectNext(-1)
+ .verifyComplete()
+ }
+ }
+
+ ".transform should transform this mono in order to generate a target mono" in {
+ StepVerifier.create(SMono.just(randomValue).transform(ml => SMono.just(ml.block().toString)))
+ .expectNext(randomValue.toString)
+ .verifyComplete()
+ }
+
+ ".apply should convert to scala" in {
+ val mono = SMono(JMono.just(randomValue))
+ mono shouldBe a[SMono[_]]
+ }
+ }
+}
+
+class SMonoAsyncTest extends AsyncFreeSpec {
+ "SMono" - {
+ ".toFuture should convert this mono to future" in {
+ val future: Future[Int] = SMono.just(1).toFuture
+ future map { v => {
+ assert(v == 1)
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/test/scala/reactor/core/scala/publisher/ScannableTest.scala b/src/test/scala/reactor/core/scala/publisher/ScannableTest.scala
new file mode 100644
index 00000000..3724bafc
--- /dev/null
+++ b/src/test/scala/reactor/core/scala/publisher/ScannableTest.scala
@@ -0,0 +1,13 @@
+package reactor.core.scala.publisher
+
+import org.scalatest.{FreeSpec, Matchers}
+import reactor.core.scala.Scannable
+
+class ScannableTest extends FreeSpec with Matchers {
+ "Scannable" - {
+ ".stepName shoulr return a meaningful String representation of this Scannable in its chain of Scannable.parents and Scannable.actuals" in {
+ val scannable: Scannable = Scannable.from(Option(SMono.just(123).onTerminateDetach()))
+ scannable.stepName shouldBe "detach"
+ }
+ }
+}
diff --git a/src/test/scala/reactor/core/scala/scheduler/ExecutionContextSchedulerTest.scala b/src/test/scala/reactor/core/scala/scheduler/ExecutionContextSchedulerTest.scala
index d0b5b21b..3c8091e2 100644
--- a/src/test/scala/reactor/core/scala/scheduler/ExecutionContextSchedulerTest.scala
+++ b/src/test/scala/reactor/core/scala/scheduler/ExecutionContextSchedulerTest.scala
@@ -3,7 +3,7 @@ package reactor.core.scala.scheduler
import java.util.concurrent.{Executors, ThreadFactory}
import org.scalatest.{FreeSpec, Matchers}
-import reactor.core.scala.publisher.Mono
+import reactor.core.scala.publisher.{Mono, SMono}
import reactor.test.StepVerifier
import scala.concurrent.ExecutionContext
@@ -13,16 +13,29 @@ import scala.concurrent.ExecutionContext
*/
class ExecutionContextSchedulerTest extends FreeSpec with Matchers {
"ExecutionContextScheduler" - {
- "should create a Scheduler using provided ExecutionContext" in {
- val executionContext = ExecutionContext.fromExecutorService(Executors.newFixedThreadPool(1, new ThreadFactory {
- override def newThread(r: Runnable): Thread = new Thread(r, "THREAD-NAME")
- }))
- val mono = Mono.just(1)
- .subscribeOn(ExecutionContextScheduler(executionContext))
- .doOnNext(i => Thread.currentThread().getName shouldBe "THREAD-NAME")
- StepVerifier.create(mono)
- .expectNext(1)
- .verifyComplete()
+ "should create a Scheduler using provided ExecutionContext" - {
+ "on Mono" in {
+ val executionContext = ExecutionContext.fromExecutorService(Executors.newFixedThreadPool(1, new ThreadFactory {
+ override def newThread(r: Runnable): Thread = new Thread(r, "THREAD-NAME-MONO")
+ }))
+ val mono = Mono.just(1)
+ .subscribeOn(ExecutionContextScheduler(executionContext))
+ .doOnNext(i => Thread.currentThread().getName shouldBe "THREAD-NAME-MONO")
+ StepVerifier.create(mono)
+ .expectNext(1)
+ .verifyComplete()
+ }
+ "on SMono" in {
+ val executionContext = ExecutionContext.fromExecutorService(Executors.newFixedThreadPool(1, new ThreadFactory {
+ override def newThread(r: Runnable): Thread = new Thread(r, "THREAD-NAME-SMONO")
+ }))
+ val mono = SMono.just(1)
+ .subscribeOn(ExecutionContextScheduler(executionContext))
+ .doOnNext(i => Thread.currentThread().getName shouldBe "THREAD-NAME-SMONO")
+ StepVerifier.create(mono)
+ .expectNext(1)
+ .verifyComplete()
+ }
}
}
}
diff --git a/versions.gradle b/versions.gradle
index 765b8884..8607d45e 100644
--- a/versions.gradle
+++ b/versions.gradle
@@ -1,5 +1,5 @@
ext.defaultScala_2_11_Version = '2.11.12'
-ext.defaultScala_2_12_Version = '2.12.6'
+ext.defaultScala_2_12_Version = '2.12.8'
if (!hasProperty('scalaVersion')) {
ext.scalaVersion = ext.defaultScala_2_12_Version
@@ -19,15 +19,13 @@ ext {
logbackVersion = "1.3.0-alpha4"
mockitoVersion = "2.21.0"
pegdownVersion = "1.6.0"
- reactorVersion = "3.1.8.RELEASE"
+ reactorVersion = "3.2.9.RELEASE"
scalaLoggingVersion = "3.9.0"
scalatestVersion = "3.0.5-M1"
scoverageVersion = "1.4.0-M3"
}
ext.libraries = [
- bytebuddy : "net.bytebuddy:byte-buddy:$bytebuddyVersion",
- bytebuddyAgent : "net.bytebuddy:byte-buddy-agent:$bytebuddyVersion",
findbugs : "com.google.code.findbugs:jsr305:$findBugsVersion",
logback : "ch.qos.logback:logback-classic:$logbackVersion",
mockitoInline : "org.mockito:mockito-inline:$mockitoVersion",