// Read the HBase table "students" as an RDD and print the first row.
// JavaSparkContext is Closeable; use try-with-resources so executors and
// temp dirs are released even if the scan fails (the original leaked it).
try (JavaSparkContext sc = new JavaSparkContext(
        "local", "hbaseTest", "/home/hadoop/spark-1.3.0", new String[0])) {

    // HBase client configuration: ZooKeeper quorum plus a non-default client port.
    Configuration conf = HBaseConfiguration.create();
    conf.set("hbase.zookeeper.quorum",
            "192.168.1.51,192.168.1.52,192.168.1.53,192.168.1.54,192.168.1.55,192.168.1.56,192.168.1.57,192.168.1.58,192.168.1.59");
    conf.set("hbase.zookeeper.property.clientPort", "12181");
    // Disable speculative map tasks so the same region is not scanned twice.
    // NOTE(review): this is the legacy Hadoop key; newer Hadoop spells it
    // "mapreduce.map.speculative" (the old key is still honored via aliasing).
    conf.setBoolean("mapred.map.tasks.speculative.execution", false);
    conf.set(TableInputFormat.INPUT_TABLE, "students");

    // Full-table scan: each RDD element is (row key, query Result).
    JavaPairRDD<ImmutableBytesWritable, Result> hBaseRDD = sc.newAPIHadoopRDD(
            conf, TableInputFormat.class, ImmutableBytesWritable.class, Result.class);

    System.out.println(hBaseRDD.first());
}