分享

求大神解决问题-hive-mapreduce问题

追随云科技 发表于 2016-5-23 20:15:49 [显示全部楼层] 回帖奖励 阅读模式 关闭右栏 7 31730
2016-05-23 16:34:49,881 INFO  [main]: log.PerfLogger (PerfLogger.java:PerfLogEnd(163)) - </PERFLOG method=serializePlan start=1463992489796 end=1463992489881 duration=85 from=org.apache.hadoop.hive.ql.exec.Utilities>
2016-05-23 16:34:49,887 ERROR [main]: mr.ExecDriver (ExecDriver.java:execute(398)) - yarn
2016-05-23 16:34:49,972 INFO  [main]: client.RMProxy (RMProxy.java:createRMProxy(98)) - Connecting to ResourceManager at hadoopnamenode/172.29.140.171:8032
2016-05-23 16:34:50,026 ERROR [main]: jdbc.JDBCStatsPublisher (JDBCStatsPublisher.java:init(315)) - Error during JDBC initialization.
com.mysql.jdbc.exceptions.jdbc4.MySQLSyntaxErrorException: Specified key was too long; max key length is 1000 bytes
        at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
        at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
        at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
        at com.mysql.jdbc.Util.handleNewInstance(Util.java:406)
        at com.mysql.jdbc.Util.getInstance(Util.java:381)
        at com.mysql.jdbc.SQLError.createSQLException(SQLError.java:1030)
        at com.mysql.jdbc.SQLError.createSQLException(SQLError.java:956)
        at com.mysql.jdbc.MysqlIO.checkErrorPacket(MysqlIO.java:3515)
        at com.mysql.jdbc.MysqlIO.checkErrorPacket(MysqlIO.java:3447)
        at com.mysql.jdbc.MysqlIO.sendCommand(MysqlIO.java:1951)
        at com.mysql.jdbc.MysqlIO.sqlQueryDirect(MysqlIO.java:2101)
        at com.mysql.jdbc.ConnectionImpl.execSQL(ConnectionImpl.java:2548)
        at com.mysql.jdbc.StatementImpl.executeUpdate(StatementImpl.java:1605)
        at com.mysql.jdbc.StatementImpl.executeUpdate(StatementImpl.java:1524)
        at org.apache.hadoop.hive.ql.stats.jdbc.JDBCStatsPublisher.init(JDBCStatsPublisher.java:291)
        at org.apache.hadoop.hive.ql.exec.mr.ExecDriver.execute(ExecDriver.java:409)
        at org.apache.hadoop.hive.ql.exec.mr.MapRedTask.execute(MapRedTask.java:137)
        at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:160)
        at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:100)
        at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:1774)
        at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:1531)
        at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1311)
        at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1120)
        at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1108)
        at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:218)
        at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:170)
        at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:381)
        at org.apache.hadoop.hive.cli.CliDriver.executeDriver(CliDriver.java:773)
        at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:691)
        at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:626)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:606)
        at org.apache.hadoop.util.RunJar.run(RunJar.java:221)
        at org.apache.hadoop.util.RunJar.main(RunJar.java:136)
2016-05-23 16:34:50,118 INFO  [main]: client.RMProxy (RMProxy.java:createRMProxy(98)) - Connecting to ResourceManager at hadoopnamenode/172.29.140.171:8032
2016-05-23 16:34:50,131 INFO  [main]: exec.Utilities (Utilities.java:getBaseWork(385)) - PLAN PATH = hdfs://hadoopnamenode:9000/tmp/hive/root/a6d9d3fd-fbcc-4534-b86f-a8c45d3f7d79/hive_2016-05-23_16-34-49_214_696142450541499542-1/-mr-10003/a6b2ae0c-ff62-44b2-b3dc-2707f0e02882/map.xml
2016-05-23 16:34:50,132 INFO  [main]: exec.Utilities (Utilities.java:getBaseWork(385)) - PLAN PATH = hdfs://hadoopnamenode:9000/tmp/hive/root/a6d9d3fd-fbcc-4534-b86f-a8c45d3f7d79/hive_2016-05-23_16-34-49_214_696142450541499542-1/-mr-10003/a6b2ae0c-ff62-44b2-b3dc-2707f0e02882/reduce.xml
2016-05-23 16:34:50,133 INFO  [main]: exec.Utilities (Utilities.java:getBaseWork(395)) - ***************non-local mode***************
2016-05-23 16:34:50,133 INFO  [main]: exec.Utilities (Utilities.java:getBaseWork(399)) - local path = hdfs://hadoopnamenode:9000/tmp/hive/root/a6d9d3fd-fbcc-4534-b86f-a8c45d3f7d79/hive_2016-05-23_16-34-49_214_696142450541499542-1/-mr-10003/a6b2ae0c-ff62-44b2-b3dc-2707f0e02882/reduce.xml
2016-05-23 16:34:50,134 INFO  [main]: exec.Utilities (Utilities.java:getBaseWork(411)) - Open file to read in plan: hdfs://hadoopnamenode:9000/tmp/hive/root/a6d9d3fd-fbcc-4534-b86f-a8c45d3f7d79/hive_2016-05-23_16-34-49_214_696142450541499542-1/-mr-10003/a6b2ae0c-ff62-44b2-b3dc-2707f0e02882/reduce.xml
2016-05-23 16:34:50,142 INFO  [main]: exec.Utilities (Utilities.java:getBaseWork(445)) - File not found: File does not exist: /tmp/hive/root/a6d9d3fd-fbcc-4534-b86f-a8c45d3f7d79/hive_2016-05-23_16-34-49_214_696142450541499542-1/-mr-10003/a6b2ae0c-ff62-44b2-b3dc-2707f0e02882/reduce.xml
        at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:66)
        at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:56)
        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocationsUpdateTimes(FSNamesystem.java:1934)
        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocationsInt(FSNamesystem.java:1875)
        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1855)
        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1827)
        at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getBlockLocations(NameNodeRpcServer.java:566)
        at org.apache.hadoop.hdfs.server.namenode.AuthorizationProviderProxyClientProtocol.getBlockLocations(AuthorizationProviderProxyClientProtocol.java:88)
        at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getBlockLocations(ClientNamenodeProtocolServerSideTranslatorPB.java:361)
        at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
        at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:617)
        at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1073)
        at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2086)
        at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2082)
        at java.security.AccessController.doPrivileged(Native Method)
        at javax.security.auth.Subject.doAs(Subject.java:415)
        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1693)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2080)

2016-05-23 16:34:50,142 INFO  [main]: exec.Utilities (Utilities.java:getBaseWork(446)) - No plan file found: hdfs://hadoopnamenode:9000/tmp/hive/root/a6d9d3fd-fbcc-4534-b86f-a8c45d3f7d79/hive_2016-05-23_16-34-49_214_696142450541499542-1/-mr-10003/a6b2ae0c-ff62-44b2-b3dc-2707f0e02882/reduce.xml
2016-05-23 16:34:50,171 WARN  [main]: mapreduce.JobResourceUploader (JobResourceUploader.java:uploadFiles(64)) - Hadoop command-line option parsing not performed. Implement the Tool interface and execute your application with ToolRunner to remedy this.
2016-05-23 16:34:50,771 INFO  [main]: log.PerfLogger (PerfLogger.java:PerfLogBegin(122)) - <PERFLOG method=getSplits from=org.apache.hadoop.hive.ql.io.CombineHiveInputFormat>
2016-05-23 16:34:50,772 INFO  [main]: exec.Utilities (Utilities.java:getBaseWork(385)) - PLAN PATH = hdfs://hadoopnamenode:9000/tmp/hive/root/a6d9d3fd-fbcc-4534-b86f-a8c45d3f7d79/hive_2016-05-23_16-34-49_214_696142450541499542-1/-mr-10003/a6b2ae0c-ff62-44b2-b3dc-2707f0e02882/map.xml
2016-05-23 16:34:50,773 INFO  [main]: io.CombineHiveInputFormat (CombineHiveInputFormat.java:getSplits(519)) - Total number of paths: 1, launching 1 threads to check non-combinable ones.
2016-05-23 16:34:50,790 INFO  [main]: io.CombineHiveInputFormat (CombineHiveInputFormat.java:getCombineSplits(441)) - CombineHiveInputSplit creating pool for hdfs://hadoopnamenode:9000/hive/warehouse/dggj.db/t_dgsksj; using filter path hdfs://hadoopnamenode:9000/hive/warehouse/dggj.db/t_dgsksj
2016-05-23 16:34:50,808 INFO  [main]: input.FileInputFormat (FileInputFormat.java:listStatus(283)) - Total input paths to process : 1
2016-05-23 16:34:50,811 INFO  [main]: input.CombineFileInputFormat (CombineFileInputFormat.java:createSplits(413)) - DEBUG: Terminated node allocation with : CompletedNodes: 3, size left: 0
2016-05-23 16:34:50,812 INFO  [main]: io.CombineHiveInputFormat (CombineHiveInputFormat.java:getCombineSplits(496)) - number of splits 1
2016-05-23 16:34:50,813 INFO  [main]: io.CombineHiveInputFormat (CombineHiveInputFormat.java:getSplits(589)) - Number of all splits 1
2016-05-23 16:34:50,814 INFO  [main]: log.PerfLogger (PerfLogger.java:PerfLogEnd(163)) - </PERFLOG method=getSplits start=1463992490771 end=1463992490814 duration=43 from=org.apache.hadoop.hive.ql.io.CombineHiveInputFormat>
2016-05-23 16:34:50,955 INFO  [main]: mapreduce.JobSubmitter (JobSubmitter.java:submitJobInternal(202)) - number of splits:1
2016-05-23 16:34:51,221 INFO  [main]: mapreduce.JobSubmitter (JobSubmitter.java:printTokens(291)) - Submitting tokens for job: job_1463710956255_0008
2016-05-23 16:34:51,303 INFO  [main]: impl.YarnClientImpl (YarnClientImpl.java:submitApplication(251)) - Submitted application application_1463710956255_0008
2016-05-23 16:34:51,315 INFO  [main]: mapreduce.Job (Job.java:submit(1311)) - The url to track the job: http://hadoopnamenode:8088/proxy/application_1463710956255_0008/
2016-05-23 16:34:51,316 INFO  [main]: exec.Task (SessionState.java:printInfo(927)) - Starting Job = job_1463710956255_0008, Tracking URL = http://hadoopnamenode:8088/proxy/application_1463710956255_0008/
2016-05-23 16:34:51,317 INFO  [main]: exec.Task (SessionState.java:printInfo(927)) - Kill Command = /opt/hadoop/hadoop-2.6.0/bin/hadoop job  -kill job_1463710956255_0008
2016-05-23 16:35:04,313 INFO  [main]: exec.Task (SessionState.java:printInfo(927)) - Hadoop job information for Stage-1: number of mappers: 1; number of reducers: 0
2016-05-23 16:35:04,535 WARN  [main]: mapreduce.Counters (AbstractCounters.java:getGroup(234)) - Group org.apache.hadoop.mapred.Task$Counter is deprecated. Use org.apache.hadoop.mapreduce.TaskCounter instead
2016-05-23 16:35:04,536 INFO  [main]: exec.Task (SessionState.java:printInfo(927)) - 2016-05-23 16:35:04,534 Stage-1 map = 0%,  reduce = 0%
2016-05-23 16:35:21,366 INFO  [main]: exec.Task (SessionState.java:printInfo(927)) - 2016-05-23 16:35:21,366 Stage-1 map = 100%,  reduce = 0%
2016-05-23 16:35:23,652 ERROR [main]: exec.Task (SessionState.java:printError(936)) - Ended Job = job_1463710956255_0008 with errors
2016-05-23 16:35:23,654 ERROR [Thread-55]: exec.Task (SessionState.java:printError(936)) - Error during job, obtaining debugging information...
2016-05-23 16:35:23,668 ERROR [Thread-56]: exec.Task (SessionState.java:printError(936)) - Examining task ID: task_1463710956255_0008_m_000000 (and more) from job job_1463710956255_0008
2016-05-23 16:35:23,668 WARN  [Thread-56]: shims.HadoopShimsSecure (Hadoop23Shims.java:getTaskAttemptLogUrl(151)) - Can't fetch tasklog: TaskLogServlet is not supported in MR2 mode.
2016-05-23 16:35:23,695 WARN  [Thread-56]: shims.HadoopShimsSecure (Hadoop23Shims.java:getTaskAttemptLogUrl(151)) - Can't fetch tasklog: TaskLogServlet is not supported in MR2 mode.
2016-05-23 16:35:23,701 WARN  [Thread-56]: shims.HadoopShimsSecure (Hadoop23Shims.java:getTaskAttemptLogUrl(151)) - Can't fetch tasklog: TaskLogServlet is not supported in MR2 mode.
2016-05-23 16:35:23,707 WARN  [Thread-56]: shims.HadoopShimsSecure (Hadoop23Shims.java:getTaskAttemptLogUrl(151)) - Can't fetch tasklog: TaskLogServlet is not supported in MR2 mode.
2016-05-23 16:35:23,720 ERROR [Thread-55]: exec.Task (SessionState.java:printError(936)) -
Task with the most failures(4):
-----
Task ID:
  task_1463710956255_0008_m_000000

URL:
  http://hadoopnamenode:8088/taskdetails.jsp?jobid=job_1463710956255_0008&tipid=task_1463710956255_0008_m_000000
-----
Diagnostic Messages for this Task:
Exception from container-launch.
Container id: container_1463710956255_0008_01_000005
Exit code: 1
Stack trace: ExitCodeException exitCode=1:
        at org.apache.hadoop.util.Shell.runCommand(Shell.java:561)
        at org.apache.hadoop.util.Shell.run(Shell.java:478)
        at org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:738)
        at org.apache.hadoop.yarn.server.nodemanager.DefaultContainerExecutor.launchContainer(DefaultContainerExecutor.java:211)
        at org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher.ContainerLaunch.call(ContainerLaunch.java:302)
        at org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher.ContainerLaunch.call(ContainerLaunch.java:82)
        at java.util.concurrent.FutureTask.run(FutureTask.java:262)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
        at java.lang.Thread.run(Thread.java:745)


Container exited with a non-zero exit code 1


2016-05-23 16:35:23,761 INFO  [main]: impl.YarnClientImpl (YarnClientImpl.java:killApplication(364)) - Killed application application_1463710956255_0008
2016-05-23 16:35:23,773 ERROR [main]: ql.Driver (SessionState.java:printError(936)) - FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask
2016-05-23 16:35:23,773 INFO  [main]: log.PerfLogger (PerfLogger.java:PerfLogEnd(163)) - </PERFLOG method=Driver.execute start=1463992489734 end=1463992523773 duration=34039 from=org.apache.hadoop.hive.ql.Driver>
2016-05-23 16:35:23,774 INFO  [main]: ql.Driver (SessionState.java:printInfo(927)) - MapReduce Jobs Launched:
2016-05-23 16:35:23,775 WARN  [main]: mapreduce.Counters (AbstractCounters.java:getGroup(234)) - Group FileSystemCounters is deprecated. Use org.apache.hadoop.mapreduce.FileSystemCounter instead
2016-05-23 16:35:23,776 INFO  [main]: ql.Driver (SessionState.java:printInfo(927)) - Stage-Stage-1: Map: 1   HDFS Read: 0 HDFS Write: 0 FAIL
2016-05-23 16:35:23,777 INFO  [main]: ql.Driver (SessionState.java:printInfo(927)) - Total MapReduce CPU Time Spent: 0 msec
2016-05-23 16:35:23,777 INFO  [main]: ql.Driver (Driver.java:execute(1696)) - Completed executing command(queryId=root_20160523162626_74843f4a-92e6-4f52-ba51-37e987a83483); Time taken: 34.039 seconds
2016-05-23 16:35:23,778 INFO  [main]: log.PerfLogger (PerfLogger.java:PerfLogBegin(122)) - <PERFLOG method=releaseLocks from=org.apache.hadoop.hive.ql.Driver>
2016-05-23 16:35:23,778 INFO  [main]: log.PerfLogger (PerfLogger.java:PerfLogEnd(163)) - </PERFLOG method=releaseLocks start=1463992523778 end=1463992523778 duration=0 from=org.apache.hadoop.hive.ql.Driver>

已有(7)人评论

跳转到指定楼层
追随云科技 发表于 2016-5-23 20:17:04
hive-site.xml   配置如下

<configuration>


<property>
  <name>hive.metastore.local</name>
  <value>false</value>
  <description>controls whether to connect to remote metastore server or open a new metastore server in Hive Client JVM</description>
</property>

<property>
  <name>javax.jdo.option.ConnectionURL</name>
  <value>jdbc:mysql://hadoopnamenode:3306/metastore?useUnicode=true&amp;characterEncoding=utf8&amp;user=root&amp;password=root&amp;createDatabaseIfNotExist=true</value>
  <description>JDBC connect string for a JDBC metastore</description>
</property>

<property>
  <name>javax.jdo.option.ConnectionDriverName</name>
  <value>com.mysql.jdbc.Driver</value>
  <description>Driver class name for a JDBC metastore</description>
</property>

<property>
  <name>javax.jdo.option.ConnectionUserName</name>
  <value>root</value>
  <description>username to use against metastore database</description>
</property>

<property>
  <name>javax.jdo.option.ConnectionPassword</name>
  <value>root</value>
  <description>password to use against metastore database</description>
</property>

<property>
  <name>hive.stats.dbconnectionstring</name>
  <value>jdbc:mysql://hadoopnamenode:3306/hive?useUnicode=true&amp;characterEncoding=utf8&amp;user=root&amp;password=root&amp;createDatabaseIfNotExist=true</value>
  <description>The default connection string for the database that stores temporary hive statistics.</description>
</property>

<property>
  <name>hive.stats.dbclass</name>
  <value>jdbc:mysql</value>
  <description>The default database that stores temporary hive statistics.</description>
</property>

<property>
  <name>hive.stats.jdbcdriver</name>
  <value>com.mysql.jdbc.Driver</value>
  <description>The JDBC driver for the database that stores temporary hive statistics.</description>
</property>

<property>
  <name>hive.metastore.uris</name>
  <value>thrift://hadoopnamenode:9083</value>
</property>

<property>
  <name>hive.metastore.warehouse.dir</name>
  <value>/hive/warehouse/</value>
</property>


</configuration>
回复

使用道具 举报

追随云科技 发表于 2016-5-23 20:18:33
已经尝试 修改 mysql 的 charset:

show variables like 'character_set_%';

SET character_set_client = utf8 ;
SET character_set_connection = utf8 ;
SET character_set_database = utf8 ;
SET character_set_results = utf8 ;
SET character_set_server = utf8 ;





show variables like 'collation_%';


SET collation_connection = utf8_general_ci ;
SET collation_database = utf8_general_ci ;
SET collation_server = utf8_general_ci ;


SET NAMES 'utf8';  


alter database metastore character set utf8;

alter database hive character set utf8;

全部都执行了,但是就是不行
回复

使用道具 举报

nextuser 发表于 2016-5-23 20:53:35
<property>
  <name>javax.jdo.option.ConnectionURL</name>
  <value>jdbc:mysql://hadoopnamenode:3306/metastore?useUnicode=true&amp;characterEncoding=utf8&amp;user=root&amp;password=root&amp;createDatabaseIfNotExist=true</value>
  <description>JDBC connect string for a JDBC metastore</description>
</property>


<property>
  <name>javax.jdo.option.ConnectionDriverName</name>
  <value>com.mysql.jdbc.Driver</value>
  <description>Driver class name for a JDBC metastore</description>
</property>

<property>
  <name>javax.jdo.option.ConnectionUserName</name>
  <value>root</value>
  <description>username to use against metastore database</description>
</property>

<property>
  <name>javax.jdo.option.ConnectionPassword</name>
  <value>root</value>
  <description>password to use against metastore database</description>
</property>

<property>
  <name>hive.stats.dbconnectionstring</name>
  <value>jdbc:mysql://hadoopnamenode:3306/hive?useUnicode=true&amp;characterEncoding=utf8&amp;user=root&amp;password=root&amp;createDatabaseIfNotExist=true</value>
  <description>The default connection string for the database that stores temporary hive statistics.</description>
</property>


<property>
  <name>hive.stats.dbclass</name>
  <value>jdbc:mysql</value>
  <description>The default database that stores temporary hive statistics.</description>
</property>

红字部分需修改下;粉红标注部分的含义是什么?

参考,最好参考这个,改下配置
hive2.0安装总结
http://www.aboutyun.com/forum.php?mod=viewthread&tid=18361




回复

使用道具 举报

追随云科技 发表于 2016-5-24 14:04:18
我这边是3个节点的集群环境,hive用的cdh5.7.0 版本,但是  hive-site.xml 怎么配置????你那个是 2.0 配置,不适用我这个吧
回复

使用道具 举报

xw2016 发表于 2016-5-24 22:49:09
追随云科技 发表于 2016-5-24 14:04
我这边是3个节点的集群环境,hive用的cdh5.7.0 版本,但是  hive-site.xml 怎么配置????你那个是 2.0  ...

3节点和2节点差不多的,我部署成功了,可以参考下:
http://www.aboutyun.com/home.php ... do=blog&id=2979
回复

使用道具 举报

xw2016 发表于 2016-5-24 22:51:07
前面我也遇到mapreduce问题,是hadoop集成部署时,yarn-site.xml配置问题,你可以先在hadoop集群上执行mapreduce程序测试一下看看有没有问题,缩小找错的范围。
回复

使用道具 举报

追随云科技 发表于 2016-5-25 09:29:50
嗯,确实是在启动yarn 的时候报错了,我还在找错误原因,不知道哪个地方配置错了,急需要一个参考文档
回复

使用道具 举报

您需要登录后才可以回帖 登录 | 立即注册

本版积分规则

关闭

推荐上一条 /2 下一条