[mw_shl_code=scala,true]import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.spark.SparkConf

object SimpleApp {
  def main(args: Array[String]) {
    val logFile = "/home/spark/opt/spark-1.2.0-bin-hadoop2.4/README.md" // Should be some file on your system
    val conf = new SparkConf().setAppName("Simple Application")
    val sc = new SparkContext(conf)
    val logData = sc.textFile(logFile, 2).cache()
    val numAs = logData.filter(line => line.contains("a")).count()
    val numBs = logData.filter(line => line.contains("b")).count()
    println("Lines with a: %s, Lines with b: %s".format(numAs, numBs))
  }
}[/mw_shl_code]

Packaging the application into a JAR:
File --> Project Structure --> click Artifacts --> click the green plus sign --> JAR --> select "From modules with dependencies"
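If you prefer building the JAR from the command line instead of IntelliJ IDEA, a minimal sbt build definition along the following lines should also work. This is only a sketch: the project name, version, and the Scala version 2.10.4 are assumptions chosen to match the Spark 1.2.0 binary referenced above.

[mw_shl_code=scala,true]name := "Simple Application"

version := "1.0"

// Spark 1.2.0 prebuilt binaries are compiled against Scala 2.10
scalaVersion := "2.10.4"

// Pulls in spark-core, which provides the SparkConf/SparkContext API used in SimpleApp
libraryDependencies += "org.apache.spark" %% "spark-core" % "1.2.0"[/mw_shl_code]

Running sbt package in the project root then produces a JAR under target/scala-2.10/, which can be submitted to the cluster with spark-submit --class SimpleApp, just like the artifact exported from IntelliJ IDEA.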