分享

hadoop伪分布式基本配置

lzw 于 2013-12-15 20:00:04 发表于「安装配置」版块(回复 0,阅读 4737)
  • 安装目录
    1. /home/hadoop/hadoop-1.0.4
    复制代码
  • 配置hosts
    1. 192.168.0.101   hadoop.master
    复制代码
  • 配置core-site.xml
    1. <?xml version="1.0"?>
    2. <?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
    3. <!-- Put site-specific property overrides in this file. -->
    4. <configuration>
    5. <property>
    6.   <name>fs.default.name</name>
    7.   <value>hdfs://hadoop.master:9000</value>
    8. </property>
    9. <property>
    10.   <name>hadoop.tmp.dir</name>
    11.   <value>/home/hadoop/tmp</value>
    12. </property>
    13. </configuration>
    复制代码
  • 配置hdfs-site.xml
    1. <?xml version="1.0"?>
    2. <?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
    3. <!-- Put site-specific property overrides in this file. -->
    4. <configuration>
    5. <property>
    6.    <name>dfs.replication</name>
    7.    <value>1</value>
    8. </property>
    9. <property>
    10.   <name>dfs.name.dir</name>
    11.   <value>/home/hadoop/hdfs/name</value>
    12. </property>
    13. <property>
    14.   <name>dfs.data.dir</name>
    15.   <value>/home/hadoop/hdfs/data</value>
    16. </property>
    17. </configuration>
    复制代码
  • 配置mapred-site.xml
    1. <?xml version="1.0"?>
    2. <?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
    3. <!-- Put site-specific property overrides in this file. -->
    4. <configuration>
    5. <property>
    6.    <name>mapred.job.tracker</name>
    7.    <value>hadoop.master:9001</value>
    8. </property>
    9. </configuration>
    复制代码
  • 配置hadoop-env.sh
    1. # Set Hadoop-specific environment variables here.
    2. # The only required environment variable is JAVA_HOME.  All others are
    3. # optional.  When running a distributed configuration it is best to
    4. # set JAVA_HOME in this file, so that it is correctly defined on
    5. # remote nodes.
    6. # The java implementation to use.  Required.
    7. export JAVA_HOME=/usr/jdk1.6.0_22
    8. # Extra Java CLASSPATH elements.  Optional.
    9.   export HADOOP_HOME=/home/hadoop/hadoop-1.0.4
    10. # export HADOOP_CLASSPATH=
    11. # The maximum amount of heap to use, in MB. Default is 1000.
    12. # export HADOOP_HEAPSIZE=2000
    13. # Extra Java runtime options.  Empty by default.
    14. # export HADOOP_OPTS=-server
    15. # Command specific options appended to HADOOP_OPTS when specified
    16. export HADOOP_NAMENODE_OPTS="-Dcom.sun.management.jmxremote $HADOOP_NAMENODE_OPTS"
    17. export HADOOP_SECONDARYNAMENODE_OPTS="-Dcom.sun.management.jmxremote $HADOOP_SECONDARYNAMENODE_OPTS"
    18. export HADOOP_DATANODE_OPTS="-Dcom.sun.management.jmxremote $HADOOP_DATANODE_OPTS"
    19. export HADOOP_BALANCER_OPTS="-Dcom.sun.management.jmxremote $HADOOP_BALANCER_OPTS"
    20. export HADOOP_JOBTRACKER_OPTS="-Dcom.sun.management.jmxremote $HADOOP_JOBTRACKER_OPTS"
    21. # export HADOOP_TASKTRACKER_OPTS=
    22. # The following applies to multiple commands (fs, dfs, fsck, distcp etc)
    23. # export HADOOP_CLIENT_OPTS
    24. # Extra ssh options.  Empty by default.
    25. # export HADOOP_SSH_OPTS="-o ConnectTimeout=1 -o SendEnv=HADOOP_CONF_DIR"
    26. # Where log files are stored.  $HADOOP_HOME/logs by default.
    27. # export HADOOP_LOG_DIR=${HADOOP_HOME}/logs
    28. # File naming remote slave hosts.  $HADOOP_HOME/conf/slaves by default.
    29. # export HADOOP_SLAVES=${HADOOP_HOME}/conf/slaves
    30. # host:path where hadoop code should be rsync'd from.  Unset by default.
    31. # export HADOOP_MASTER=master:/home/$USER/src/hadoop
    32. # Seconds to sleep between slave commands.  Unset by default.  This
    33. # can be useful in large clusters, where, e.g., slave rsyncs can
    34. # otherwise arrive faster than the master can service them.
    35. # export HADOOP_SLAVE_SLEEP=0.1
    36. # The directory where pid files are stored. /tmp by default.
    37. # export HADOOP_PID_DIR=/var/hadoop/pids
    38. # A string representing this instance of hadoop. $USER by default.
    39. # export HADOOP_IDENT_STRING=$USER
    40. # The scheduling priority for daemon processes.  See 'man nice'.
    41. # export HADOOP_NICENESS=10
    42.   export PATH=$PATH:$HADOOP_HOME/bin
    复制代码


欢迎加入 about云 QQ 群:90371779、322273151、39327136(云计算爱好者群);亦可关注 about云 腾讯认证空间,或关注本站微信。

没找到任何评论,期待你打破沉寂

您需要登录后才可以回帖 登录 | 立即注册

本版积分规则

关闭

推荐上一条 /2 下一条