package day06

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.RDD

/**
 * Demonstrates Spark's `intersection` transformation on two small string RDDs.
 */
object MyTransformation_scala_3 {

  /**
   * Builds two example RDDs and returns the string rendering of their
   * intersection, e.g. `List(aa, cc)` (element order within the result is
   * not guaranteed by Spark).
   *
   * NOTE(review): the method name keeps the original "Insertsection" typo
   * (for "intersection") so existing callers are not broken.
   *
   * @param sc an active SparkContext used to parallelize the sample data
   * @return the collected intersection as a `List(...).toString` value
   */
  def myInsertsection(sc: SparkContext): String = {
    val rdd1 = sc.parallelize(List("aa", "bb", "cc"))
    val rdd2 = sc.parallelize(List("aa", "cc"))
    // intersection keeps only the (deduplicated) elements present in both RDDs
    val intersectionRDD: RDD[String] = rdd1.intersection(rdd2)
    intersectionRDD.collect().toList.toString()
  }

  /** Entry point: runs the intersection demo on a local SparkContext. */
  def main(args: Array[String]): Unit = {
    val conf: SparkConf =
      new SparkConf().setMaster("local").setAppName("MyTransformation_scala_3")
    // val, not var: neither binding is ever reassigned
    val sc: SparkContext = new SparkContext(conf)
    try {
      println(myInsertsection(sc))
    } finally {
      // always release the SparkContext, even if the job throws
      sc.stop()
    }
  }
}
// Spark transformation operator: intersection of RDDs, implemented in Scala
// Reposted from blog.csdn.net/wjn19921104/article/details/80230403