How can I wait until all data processing is complete and then shut the program down, without using sleep?
Blocking with sleep does work, but I cannot pick a reasonable duration: if it is too small the processing does not finish, and if it is too large the program keeps running long after the work is done.
package org.apache.spark.sql.catalyst.json.com
import zio.ZIO
import monix.eval._
import monix.execution.Callback
import monix.execution.Scheduler
import scala.concurrent.duration.DurationInt
import monix.reactive._
import monix.execution.Scheduler.Implicits.global
import java.lang.Thread.sleep
import scala.util.Random
object ScalaZIOLearn1 {
def main(args: Array[String]): Unit = {
val t: Observable[Task[Int]] = {
Observable
.fromIterable[Int](11 to 2200000)
.asyncBoundary(OverflowStrategy.BackPressure(3))
// .interval(1.second)
// common filtering and mapping
.map {
x =>
println(s"map1 ${x}")
sleep(Random.nextInt(3000))
x + 10000
}
.asyncBoundary(OverflowStrategy.BackPressure(3))
.map { x =>
println(s"map2 ${x}")
sleep(Random.nextInt(3000))
x * 10
}
.map { x =>
Task {
// println(s"flatmap: ${x}")
println(s"map3 ${x} start")
sleep(15000)
println(s"map3 ${x} end")
x
}
}
}
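// note: t is an Observable[Task[Int]], so subscribing runs map1/map2, but the Task values built in the last map are never executed here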
t.subscribe()
sleep(Long.MaxValue) // this is not smart; is there another way to wait until all data is processed and then shut down? (see the sketch after this code)
}
}
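For reference, here is a minimal sketch of one possible approach, assuming Monix 3.x: instead of subscribing and then sleeping, run the inner Tasks with mapEval and block the main thread only on the Task returned by completedL, which finishes exactly when the stream does. The object name WaitForCompletionSketch and the small input range are placeholders for illustration, not part of the original code.

import monix.eval.Task
import monix.execution.Scheduler.Implicits.global
import monix.reactive.Observable

import scala.concurrent.Await
import scala.concurrent.duration.Duration

object WaitForCompletionSketch {
  def main(args: Array[String]): Unit = {
    val done: Task[Unit] =
      Observable
        .fromIterable(1 to 10)            // placeholder data
        .mapEval { x =>                   // actually executes each Task, one element at a time
          Task {
            println(s"processing $x")
            x * 10
          }
        }
        .completedL                       // Task[Unit] that completes when the stream ends

    // Block only until processing has finished, then the JVM can exit normally.
    Await.result(done.runToFuture, Duration.Inf)
    println("all data processed, exiting")
  }
}

With this shape the wait adapts to however long the processing actually takes, so there is no sleep duration to tune.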