Hi everyone,
I'm setting up a Spark development environment using spark-assembly-1.6.0-cdh5.8.0-hadoop2.6.0-cdh5.8.0.jar from CDH 5.8, with JDK 1.7.0_76 (64-bit) and Scala 2.10.5. Compiling a WordCount program keeps failing with the error below. The CDH 5.8 cluster itself also uses Scala 2.10.5, with JDK 1.7.0_67 (64-bit; a slightly older patch level, which shouldn't matter). Has anyone run into this? Any pointers appreciated.
[Screenshot: Spark development environment]
Error log:
Error:scalac:
while compiling: E:\Spark\src\com\dt\spark\core\WordCount.scala
during phase: erasure
library version: version 2.10.5
compiler version: version 2.10.5
reconstructed args: -nobootcp -javabootclasspath ; -classpath C:\Program Files\Java\jdk1.7.0_76\jre\lib\charsets.jar;C:\Program Files\Java\jdk1.7.0_76\jre\lib\deploy.jar;C:\Program Files\Java\jdk1.7.0_76\jre\lib\ext\access-bridge-64.jar;C:\Program Files\Java\jdk1.7.0_76\jre\lib\ext\dnsns.jar;C:\Program Files\Java\jdk1.7.0_76\jre\lib\ext\jaccess.jar;C:\Program Files\Java\jdk1.7.0_76\jre\lib\ext\localedata.jar;C:\Program Files\Java\jdk1.7.0_76\jre\lib\ext\sunec.jar;C:\Program Files\Java\jdk1.7.0_76\jre\lib\ext\sunjce_provider.jar;C:\Program Files\Java\jdk1.7.0_76\jre\lib\ext\sunmscapi.jar;C:\Program Files\Java\jdk1.7.0_76\jre\lib\ext\zipfs.jar;C:\Program Files\Java\jdk1.7.0_76\jre\lib\javaws.jar;C:\Program Files\Java\jdk1.7.0_76\jre\lib\jce.jar;C:\Program Files\Java\jdk1.7.0_76\jre\lib\jfr.jar;C:\Program Files\Java\jdk1.7.0_76\jre\lib\jfxrt.jar;C:\Program Files\Java\jdk1.7.0_76\jre\lib\jsse.jar;C:\Program Files\Java\jdk1.7.0_76\jre\lib\management-agent.jar;C:\Program Files\Java\jdk1.7.0_76\jre\lib\plugin.jar;C:\Program Files\Java\jdk1.7.0_76\jre\lib\resources.jar;C:\Program Files\Java\jdk1.7.0_76\jre\lib\rt.jar;E:\Spark\out\production\Spark;C:\Program Files (x86)\scala\lib\scala-actors-migration.jar;C:\Program Files (x86)\scala\lib\scala-actors.jar;C:\Program Files (x86)\scala\lib\scala-library.jar;C:\Program Files (x86)\scala\lib\scala-reflect.jar;C:\Program Files (x86)\scala\lib\scala-swing.jar;E:\CSOT-Software\spark-assembly-1.6.0-cdh5.8.0-hadoop2.6.0-cdh5.8.0.jar
last tree to typer: Ident(sc)
symbol: value sc (flags: <triedcooking>)
symbol definition: val sc: org.apache.spark.SparkContext
tpe: sc.type
symbol owners: value sc -> method main -> object WordCount -> package core
context owners: value conf -> method main -> object WordCount -> package core
== Enclosing template or block ==
DefDef( // def main(args: Array[String]): Unit in object WordCount
  <method>
  "main"
  []
  // 1 parameter list
  ValDef( // args: Array[String]
    <param>
    "args"
    <tpt> // tree.tpe=Array[String]
    <empty>
  )
  <tpt> // tree.tpe=runtime.BoxedUnit
  Block( // tree.tpe=Unit
    // 2 statements
    ValDef( // val conf: org.apache.spark.SparkConf
      <triedcooking>
      "conf"
      <tpt> // tree.tpe=org.apache.spark.SparkConf
      Apply( // def setAppName(name: String): org.apache.spark.SparkConf in class SparkConf, tree.tpe=org.apache.spark.SparkConf
        new org.apache.spark.SparkConf()."setAppName" // def setAppName(name: String): org.apache.spark.SparkConf in class SparkConf, tree.tpe=(name: String)org.apache.spark.SparkConf
        "WordCount"
      )
    )
    ValDef( // val sc: org.apache.spark.SparkContext
      <triedcooking>
      "sc"
      <tpt> // tree.tpe=org.apache.spark.SparkContext
      Apply( // def <init>(config: org.apache.spark.SparkConf): org.apache.spark.SparkContext in class SparkContext, tree.tpe=org.apache.spark.SparkContext
        new org.apache.spark.SparkContext."<init>" // def <init>(config: org.apache.spark.SparkConf): org.apache.spark.SparkContext in class SparkContext, tree.tpe=(config: org.apache.spark.SparkConf)org.apache.spark.SparkContext
        "conf" // val conf: org.apache.spark.SparkConf, tree.tpe=org.apache.spark.SparkConf
      )
    )
    Apply( // def stop(): Unit in class SparkContext, tree.tpe=Unit
      "sc"."stop" // def stop(): Unit in class SparkContext, tree.tpe=()Unit
      Nil
    )
  )
)
== Expanded type of tree ==
SingleType(value sc)
uncaught exception during compilation: scala.reflect.internal.Types$TypeError
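
For reference, the enclosing tree printed above corresponds to roughly the following minimal source. This is a reconstruction from the log, not the actual file: the package name is inferred from the file path and the symbol owners line, and nothing beyond these three statements appears in the printed tree.

package com.dt.spark.core

import org.apache.spark.{SparkConf, SparkContext}

object WordCount {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("WordCount") // the ValDef for conf in the tree
    val sc = new SparkContext(conf)                    // the ValDef for sc; Ident(sc) is the "last tree to typer"
    sc.stop()                                          // the final Apply in the Block
  }
}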
If anyone needs the CDH jar above for testing, let me know how I can get it to you; it's too big to upload here!