Below is the source code for the Word Count program in Apache Spark:
import org.apache.spark.{SparkConf, SparkContext}

object SparkWordCount {
  def main(args: Array[String]): Unit = {
    // Create a SparkContext for a local run named "Word Count".
    // (Older Spark versions passed the master, app name, and the Spark home
    // "/usr/local/spark" directly to the SparkContext constructor.)
    val conf = new SparkConf().setAppName("Word Count").setMaster("local")
    val sc = new SparkContext(conf)

    // Read the input file as an RDD of lines
    val input = sc.textFile("input.txt")

    // Split each line into words, pair each word with a count of 1,
    // and sum the counts per word
    val count = input.flatMap(line => line.split(" "))
                     .map(word => (word, 1))
                     .reduceByKey(_ + _)

    // Write the (word, count) pairs to the "outfile" output directory
    count.saveAsTextFile("outfile")
    println("OK")
  }
}
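The same transformation pipeline can be tried on a small in-memory collection before pointing it at a file, which makes it easy to see what reduceByKey produces. A minimal sketch, assuming the SparkContext sc from the program above; the sample lines and variable names are illustrative and not part of the original example:

// Illustrative check of the word-count pipeline on an in-memory collection.
// The sample lines below are hypothetical, not part of the original program.
val sampleLines = sc.parallelize(Seq("hello spark", "hello world"))
val sampleCounts = sampleLines.flatMap(line => line.split(" "))
                              .map(word => (word, 1))
                              .reduceByKey(_ + _)
// Bring the results back to the driver and print them,
// e.g. (hello,2), (spark,1), (world,1)
sampleCounts.collect().foreach(println)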