import org.apache.spark.{SparkConf, SparkContext}
object WordCount {
  /** Classic Spark word count: reads a text file from HDFS, splits each line
    * on spaces, counts occurrences of each word, and writes the resulting
    * (word, count) pairs back to HDFS as text.
    *
    * @param args command-line arguments (currently unused)
    */
  def main(args: Array[String]): Unit = {
    // Fix: the original set the app name to the HDFS input path; the app name
    // should identify the job, and the path belongs in textFile below.
    val conf = new SparkConf().setAppName("WordCount")
    val sc = new SparkContext(conf)
    try {
      // HDFS URIs require the "//" scheme separator ("hdfs://host:port/...");
      // the original "hdfs:master:9000" form is malformed.
      // NOTE(review): original read local "derby.log" here — the HDFS path that
      // was misplaced into setAppName appears to be the intended input; confirm.
      val input = sc.textFile("hdfs://master:9000/test/test.txt")
      val words = input.flatMap(_.split(" "))
      val counts = words.map(word => (word, 1)).reduceByKey(_ + _)
      // saveAsTextFile returns Unit — do not bind the result to a val.
      counts.saveAsTextFile("hdfs://master:9000/test/result")
    } finally {
      // Always release cluster resources, even if the job throws.
      sc.stop()
    }
  }
}