import org.apache.spark.{SparkConf, SparkContext}

// Build a local SparkContext for the demo
val conf = new SparkConf().setAppName("RDDDemo").setMaster("local")
val sc = new SparkContext(conf)
// Two key/value datasets that share the keys 1, 2 and 3
val arr1 = Array(Tuple2(1, "Spark"), Tuple2(2, "Hadoop"), Tuple2(3, "Tachyon"))
val arr2 = Array(Tuple2(1, 100), Tuple2(2, 70), Tuple2(3, 90))
val rdd1 = sc.parallelize(arr1)
val rdd2 = sc.parallelize(arr2)
// join pairs the values of rdd1 and rdd2 that share the same key
val rdd3 = rdd1.join(rdd2)
rdd3.collect().foreach(println)
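With the data as reconstructed above (keys 1 to 3 on both sides), join yields an RDD[(K, (V, W))] that keeps only keys present in both RDDs, so collect() would print one pair per key, in no guaranteed order, roughly:

// (1,(Spark,100))
// (2,(Hadoop,70))
// (3,(Tachyon,90))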
import org.apache.spark.{SparkConf, SparkContext}

val conf = new SparkConf().setAppName("RDDDemo").setMaster("local")
val sc = new SparkContext(conf)
val arr1 = Array(Tuple2(1, "Spark"), Tuple2(2, "Hadoop"), Tuple2(3, "Tachyon"))
// This time arr2 holds several values for the same key (1 and 2 repeat)
val arr2 = Array(Tuple2(1, 100), Tuple2(2, 70), Tuple2(3, 90), Tuple2(1, 95), Tuple2(2, 65), Tuple2(1, 110))
val rdd1 = sc.parallelize(arr1)
val rdd2 = sc.parallelize(arr2)
// cogroup collects, for every key, all values from each RDD into two iterables
val rdd3 = rdd1.cogroup(rdd2)
rdd3.collect().foreach(println)
sc.stop()
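Unlike join, cogroup does not pair individual values: it returns an RDD[(K, (Iterable[V], Iterable[W]))], grouping every value from each side under its key. With the data sketched above, and assuming a Spark version where the iterables print as CompactBuffer, the output would look roughly like:

// (1,(CompactBuffer(Spark),CompactBuffer(100, 95, 110)))
// (2,(CompactBuffer(Hadoop),CompactBuffer(70, 65)))
// (3,(CompactBuffer(Tachyon),CompactBuffer(90)))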