HADOOP
https://hadoop.apache.org/docs/stable/hadoop-project-dist/hadoop-common/SingleCluster.html
JAVA_HOME
export JAVA_HOME=/root/app/jdk1.8.0_161
export PATH=$JAVA_HOME/bin:$PATH
export CLASSPATH=.:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar
hadoop-env.sh
JAVA_HOME=/root/app/jdk1.8.0_161
hdfs-site.xml
<property>
<name>dfs.replication</name>
<value>1</value>
</property>
<property>
<name>dfs.namenode.name.dir</name>
<value>/root/app/data/dfs/namenode</value>
</property>
<property>
<name>dfs.datanode.data.dir</name>
<value>/root/app/data/dfs/datanode</value>
</property>
core-site.xml
<property>
<name>fs.defaultFS</name>
<value>hdfs://localhost:9000</value>
</property>
<property>
<name>hadoop.tmp.dir</name>
<value>/root/app/data/dfs/tmp</value>
</property>
start
ssh-keygen -t rsa
cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys
ssh localhost
./hdfs namenode -format => bin
./start-dfs.sh => sbin
./stop-dfs.sh => sbin
HBASE
cp hdfs-site.xml ~/app/hbase-1.2.4/conf/
cp core-site.xml ~/app/hbase-1.2.4/conf/
hbase-env.sh
http://hbase.apache.org/book.html#quickstart
zk 不改
JAVA_HOME=/root/app/jdk1.8.0_161
# Configure PermSize. Only needed in JDK7. You can safely remove it for JDK8+
#export HBASE_MASTER_OPTS="$HBASE_MASTER_OPTS -XX:PermSize=128m -XX:MaxPermSize=128m"
#export HBASE_REGIONSERVER_OPTS="$HBASE_REGIONSERVER_OPTS -XX:PermSize=128m -XX:MaxPermSize=128m"
hbase-site.xml
<property> <name>hbase.rootdir</name> <value>hdfs://localhost:9000/hbase</value> </property> <property> <name>hbase.zookeeper.property.dataDir</name> <value>/root/app/data/zookeeper</value> </property> <property> <name>hbase.cluster.distributed</name> <value>true</value> </property>
start
./start-hbase.sh => bin jps ./hbase shell
版权声明:本文内容由互联网用户自发贡献,该文观点与技术仅代表作者本人。本站仅提供信息存储空间服务,不拥有所有权,不承担相关法律责任。如发现本站有涉嫌侵权/违法违规的内容, 请发送邮件至 dio@foxmail.com 举报,一经查实,本站将立刻删除。