I created a new Java project, imported the Hadoop-related jars, the HBase-related jars, and spark-assembly-1.0.2-hadoop2.2.0.jar into the project, then copied the code from the SparkPi example verbatim (a sketch of what I'm running follows the stack trace below). After setting the master, I right-clicked and chose Run As > Java Application, and got the following error:
Exception in thread "main" org.apache.spark.SparkException: Job aborted due to stage failure: Task 0.0:0 failed 4 times, most recent failure: Exception failure in TID 6 on host TMaster2.Hadoop: java.lang.ClassNotFoundException: com.qtong.spark.test.SparkHBase$1
java.net.URLClassLoader$1.run(URLClassLoader.java:202)
java.security.AccessController.doPrivileged(Native Method)
java.net.URLClassLoader.findClass(URLClassLoader.java:190)
java.lang.ClassLoader.loadClass(ClassLoader.java:306)
java.lang.ClassLoader.loadClass(ClassLoader.java:247)
java.lang.Class.forName0(Native Method)
java.lang.Class.forName(Class.java:249)
org.apache.spark.serializer.JavaDeserializationStream$$anon$1.resolveClass(JavaSerializer.scala:60)
java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1589)
java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1494)
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1748)
java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1327)
java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1969)
java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1893)
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1775)
java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1327)
java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1969)
java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1893)
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1775)
java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1327)
java.io.ObjectInputStream.readObject(ObjectInputStream.java:349)
org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:63)
org.apache.spark.scheduler.ResultTask$.deserializeInfo(ResultTask.scala:61)
org.apache.spark.scheduler.ResultTask.readExternal(ResultTask.scala:141)
java.io.ObjectInputStream.readExternalData(ObjectInputStream.java:1814)
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1773)
java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1327)
java.io.ObjectInputStream.readObject(ObjectInputStream.java:349)
org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:63)
org.apache.spark.serializer.JavaSerializerInstance.deserialize(JavaSerializer.scala:85)
org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:165)
java.util.concurrent.ThreadPoolExecutor$Worker.runTask(ThreadPoolExecutor.java:895)
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:918)
java.lang.Thread.run(Thread.java:662)
Driver stacktrace:
at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1049)
at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1033)
at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1031)
at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47)
at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1031)
at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:635)
at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:635)
at scala.Option.foreach(Option.scala:236)
at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:635)
at org.apache.spark.scheduler.DAGSchedulerEventProcessActor$$anonfun$receive$2.applyOrElse(DAGScheduler.scala:1234)
at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
at akka.actor.ActorCell.invoke(ActorCell.scala:456)
at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
at akka.dispatch.Mailbox.run(Mailbox.scala:219)
at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
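For reference, this is roughly the code in question: a minimal sketch based on the JavaSparkPi example shipped with Spark 1.0.2, using the class name com.qtong.spark.test.SparkHBase from the stack trace. The master URL is an assumption (the host TMaster2.Hadoop appears in the trace, but only as the executor host). Note that each anonymous Function/Function2 below compiles to an inner class such as SparkHBase$1, which is exactly the class the executor fails to load:

package com.qtong.spark.test;

import java.util.ArrayList;
import java.util.List;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;

public class SparkHBase {
    public static void main(String[] args) {
        // Master URL is assumed; only the host name comes from the stack trace.
        SparkConf conf = new SparkConf()
                .setAppName("JavaSparkPi")
                .setMaster("spark://TMaster2.Hadoop:7077");
        JavaSparkContext jsc = new JavaSparkContext(conf);

        int slices = 2;
        int n = 100000 * slices;
        List<Integer> l = new ArrayList<Integer>(n);
        for (int i = 0; i < n; i++) {
            l.add(i);
        }

        // Each anonymous function below compiles to an inner class
        // (SparkHBase$1, SparkHBase$2, ...) that the executors must be able
        // to load when they deserialize the task.
        int count = jsc.parallelize(l, slices).map(new Function<Integer, Integer>() {
            public Integer call(Integer integer) {
                double x = Math.random() * 2 - 1;
                double y = Math.random() * 2 - 1;
                return (x * x + y * y < 1) ? 1 : 0;
            }
        }).reduce(new Function2<Integer, Integer, Integer>() {
            public Integer call(Integer integer, Integer integer2) {
                return integer + integer2;
            }
        });

        System.out.println("Pi is roughly " + 4.0 * count / n);
        jsc.stop();
    }
}

When the driver is launched directly from Eclipse like this rather than via spark-submit, these inner classes exist only on the Eclipse build path, so the executors have no way to load them; shipping the application jar to the cluster (for example via SparkConf.setJars or JavaSparkContext.addJar) is usually required in this setup.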