takeOrdered example
package com.sgg.sparkCore

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object SparkTrans012_takeOrdered {

  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("s")
    val sc = new SparkContext(conf)
    sc.setLogLevel("WARN")

    val rdd: RDD[Int] = sc.makeRDD(List(1, 2, 3, 4))
    // takeOrdered(n) returns the n smallest elements under the given Ordering (ascending by default)
    val ints: Array[Int] = rdd.takeOrdered(3)(Ordering[Int])
    println(ints.mkString(","))

    val rdd1: RDD[String] = sc.makeRDD(List("hello", "world", "spark", "scala", "netty", "by"))
    // Passing a reversed Ordering returns the 3 largest elements instead
    val strings: Array[String] = rdd1.takeOrdered(3)(Ordering[String].reverse)
    println(strings.mkString(","))

    sc.stop()
  }
}
Output:
1,2,3
world,spark,scala
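takeOrdered takes the element count in its first parameter list and an implicit Ordering in the second, so any custom Ordering can be passed explicitly. A minimal supplementary sketch, reusing rdd1 from the example above with a hypothetical byLength value, that orders the strings by length instead of lexicographically:

// Custom Ordering: take the 3 shortest strings by length.
// With ties (several 5-letter words here) the exact picks among equals are not guaranteed.
val byLength: Array[String] = rdd1.takeOrdered(3)(Ordering.by[String, Int](_.length))
println(byLength.mkString(","))   // "by" plus two of the five-letter strings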