HADOOP

https://hadoop.apache.org/docs/stable/hadoop-project-dist/hadoop-common/SingleCluster.html

JAVA_HOME
export JAVA_HOME=/root/app/jdk1.8.0_161
export PATH=$JAVA_HOME/bin:$PATH
export CLASSPATH=.:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar
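To make these exports persist across shells, one option (assuming ~/.bashrc is the shell's rc file) is to append them there and verify:
echo 'export JAVA_HOME=/root/app/jdk1.8.0_161' >> ~/.bashrc
echo 'export PATH=$JAVA_HOME/bin:$PATH' >> ~/.bashrc
echo 'export CLASSPATH=.:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar' >> ~/.bashrc
source ~/.bashrc
java -version    # expect java version "1.8.0_161"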
hadoop-env.sh
JAVA_HOME=/root/app/jdk1.8.0_161
hdfs-site.xml
<property>
    <name>dfs.replication</name>
    <value>1</value>
</property>
<property>
    <name>dfs.namenode.name.dir</name>
    <value>/root/app/data/dfs/namenode</value>
</property>
<property>
    <name>dfs.datanode.data.dir</name>
    <value>/root/app/data/dfs/datanode</value>
</property>
core-site.xml
<property>
    <name>fs.defaultFS</name>
    <value>hdfs://localhost:9000</value>
</property>
<property>
    <name>hadoop.tmp.dir</name>
    <value>/root/app/data/dfs/tmp</value>
</property>
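The local directories referenced above are not always pre-created; a quick sketch (paths assumed from hdfs-site.xml and core-site.xml):
mkdir -p /root/app/data/dfs/namenode    # dfs.namenode.name.dir
mkdir -p /root/app/data/dfs/datanode    # dfs.datanode.data.dir
mkdir -p /root/app/data/dfs/tmp         # hadoop.tmp.dir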
start
ssh-keygen -t rsa
cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys
ssh localhost
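A quick check that key-based login really works without a password prompt (assuming the default ~/.ssh paths):
chmod 600 ~/.ssh/authorized_keys            # sshd ignores key files that are group/world writable
ssh localhost 'echo passwordless ssh ok'    # should print without asking for a password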

./hdfs namenode -format    => bin
./start-dfs.sh   => sbin
./stop-dfs.sh   => sbin
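After start-dfs.sh, a short sanity check (ports and paths assumed for a Hadoop 2.x single-node setup):
jps                              # expect NameNode, DataNode, SecondaryNameNode
./hdfs dfs -mkdir -p /user/root  # from bin/: create a home directory in HDFS
./hdfs dfs -ls /                 # list the HDFS root
# NameNode web UI: http://localhost:50070 (default for Hadoop 2.x)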

HBASE

cp hdfs-site.xml ~/app/hbase-1.2.4/conf/
cp core-site.xml ~/app/hbase-1.2.4/conf/
hbase-env.sh

http://hbase.apache.org/book.html#quickstart

ZooKeeper settings: leave unchanged (HBase manages its own ZooKeeper in this setup)

JAVA_HOME=/root/app/jdk1.8.0_161

# Configure PermSize. Only needed in JDK7. You can safely remove it for JDK8+
#export HBASE_MASTER_OPTS="$HBASE_MASTER_OPTS -XX:PermSize=128m -XX:MaxPermSize=128m"
#export HBASE_REGIONSERVER_OPTS="$HBASE_REGIONSERVER_OPTS -XX:PermSize=128m -XX:MaxPermSize=128m"
hbase-site.xml
<property>
    <name>hbase.rootdir</name>
    <value>hdfs://localhost:9000/hbase</value>
</property>
<property>
    <name>hbase.zookeeper.property.dataDir</name>
    <value>/root/app/data/zookeeper</value>
</property>
<property>
    <name>hbase.cluster.distributed</name>
    <value>true</value>
</property>
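The ZooKeeper data directory above can be pre-created as well (path assumed from the property):
mkdir -p /root/app/data/zookeeper    # hbase.zookeeper.property.dataDir; HBase starts this ZooKeeper itself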
start
./start-hbase.sh   => bin
jps
./hbase shell
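A minimal smoke test, fed into the shell non-interactively (table and column-family names are just examples):
./hbase shell <<'EOF'
create 'test', 'cf'                      # table with one column family
put 'test', 'row1', 'cf:a', 'value1'     # write a cell
scan 'test'                              # read it back
disable 'test'
drop 'test'
EOF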

Copyright notice: this is an original article by eaapple, released under the CC 4.0 BY-SA license; please include the original link and this notice when reposting.
Original link: https://www.cnblogs.com/eaapple/p/13513622.html