Spark Basics Example: Word Count (WordCount)
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object WordCount {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    conf.setMaster("local[1]") // or local[*] to use all available local cores
    conf.setAppName("WordCount")
    // Construct the entry point object for Spark programming
    val sc = new SparkContext(conf)
    sc.textFile("data/wordcount/input/a.txt")
      .flatMap(_.split("\\s+"))  // split each line on whitespace into words
      .map((_, 1))               // pair each word with an initial count of 1
      .reduceByKey(_ + _)        // sum the counts per word
      .foreach(println)
    sc.stop()
  }
}
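As a minimal sketch of a common variant, the same pipeline can collect the results back to the driver and print them there in sorted order instead of printing on the executors. The input path and the object name WordCountCollect below are illustrative assumptions, not part of the original example.

import org.apache.spark.{SparkConf, SparkContext}

object WordCountCollect {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local[*]").setAppName("WordCountCollect")
    val sc = new SparkContext(conf)

    // Same word-count pipeline, but collect() brings the (word, count) pairs
    // to the driver so they can be sorted and printed in one place.
    val counts = sc.textFile("data/wordcount/input/a.txt") // hypothetical input path
      .flatMap(_.split("\\s+"))
      .map((_, 1))
      .reduceByKey(_ + _)
      .collect()

    counts.sortBy(-_._2).foreach { case (word, n) => println(s"$word\t$n") }

    sc.stop()
  }
}

Printing on the driver is convenient for small local tests; for large datasets, writing the RDD out with saveAsTextFile is the more typical choice.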