Hive runs on top of Hadoop, so a working Hadoop environment is required. In this walkthrough Hive is installed on the NameNode of a fully distributed Hadoop cluster.
See: Hadoop cluster setup
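Before starting, it is worth a quick check that the Hadoop daemons from that setup are actually running on this node (plain sanity-check commands, nothing Hive-specific yet):
[hadoop@hadoop01 /home/hadoop]$jps
[hadoop@hadoop01 /home/hadoop]$hdfs dfsadmin -report
[hadoop@hadoop01 /home/hadoop]$hdfs dfs -ls /
jps should list the NameNode (and the ResourceManager if YARN runs on this node), and the dfsadmin report should show the DataNodes registered.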
[hadoop@hadoop01 /home/hadoop]$cd /app/
[hadoop@hadoop01 /app]$wget http://archive.apache.org/dist/hive/stable-2/apache-hive-2.3.4-bin.tar.gz
[hadoop@hadoop01 /app]$tar zxvf apache-hive-2.3.4-bin.tar.gz
[hadoop@hadoop01 /app]$ln -s apache-hive-2.3.4-bin hive
[hadoop@hadoop01 /app]$echo -e '##################HIVE environment variables#############\nexport HIVE_HOME=/app/hive\nexport PATH=$HIVE_HOME/bin:$PATH' >> ~/.bash_profile && source ~/.bash_profile && tail -3 ~/.bash_profile
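A quick way to confirm the new variables took effect (echo $HIVE_HOME should print /app/hive, and which hive should resolve to /app/hive/bin/hive):
[hadoop@hadoop01 /app]$echo $HIVE_HOME
[hadoop@hadoop01 /app]$which hive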
[hadoop@hadoop01 /app]$cd /app/hive/conf/
[hadoop@hadoop01 /app/hive/conf]$cp hive-default.xml.template hive-site.xml
[hadoop@hadoop01 /app/hive/conf]$cp hive-env.sh.template hive-env.sh
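hive-env.sh normally needs to know where Hadoop lives. If the hadoop command is already on PATH this can be skipped; otherwise point HADOOP_HOME at the local Hadoop installation (the /app/hadoop path below is an assumption based on the /app layout used here, adjust it to the real location):
[hadoop@hadoop01 /app/hive/conf]$echo 'export HADOOP_HOME=/app/hadoop' >> hive-env.sh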
hive-site.xml copied straight from the template still contains the placeholders ${system:java.io.tmpdir} and ${system:user.name}, which Hive does not expand at startup and which typically cause "Relative path in absolute URI" errors. Replace them with a concrete local directory and the hadoop user: first preview the substitutions with sed -n 's#...#...#gp' (print only), then apply them in place with sed -i.
[hadoop@hadoop01 /app/hive/conf]$sed -n 's#${system:java.io.tmpdir}#/app/hive.java.io.tmpdir#gp' hive-site.xml
<value>/app/hive.java.io.tmpdir/${system:user.name}</value>
<value>/app/hive.java.io.tmpdir/${hive.session.id}_resources</value>
<value>/app/hive.java.io.tmpdir/${system:user.name}</value>
<value>/app/hive.java.io.tmpdir/${system:user.name}/operation_logs</value>
[hadoop@hadoop01 /app/hive/conf]$sed 's#${system:java.io.tmpdir}#/app/hive.java.io.tmpdir#g' hive-site.xml | sed -n 's#${system:user.name}#hadoop#gp'
<value>/app/hive.java.io.tmpdir/hadoop</value>
<value>/app/hive.java.io.tmpdir/hadoop</value>
<value>/app/hive.java.io.tmpdir/hadoop/operation_logs</value>
[hadoop@hadoop01 /app/hive/conf]$sed -i 's#${system:java.io.tmpdir}#/app/hive.java.io.tmpdir#g' hive-site.xml
[hadoop@hadoop01 /app/hive/conf]$sed -i 's#${system:user.name}#hadoop#g' hive-site.xml
Verify that the placeholders are gone:
[hadoop@hadoop01 /app/hive/conf]$grep 'hive.java.io.tmpdir/hadoop' hive-site.xml
<value>/app/hive.java.io.tmpdir/hadoop</value>
<value>/app/hive.java.io.tmpdir/hadoop</value>
<value>/app/hive.java.io.tmpdir/hadoop/operation_logs</value>
[hadoop@hadoop01 /app]$mkdir -p /app/hive.java.io.tmpdir/hadoop
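Hive also expects a scratch directory and a warehouse directory in HDFS. The standard Hive getting-started steps are to create /tmp and /user/hive/warehouse (the default for hive.metastore.warehouse.dir) and make them group-writable:
[hadoop@hadoop01 /app]$hdfs dfs -mkdir -p /tmp
[hadoop@hadoop01 /app]$hdfs dfs -mkdir -p /user/hive/warehouse
[hadoop@hadoop01 /app]$hdfs dfs -chmod g+w /tmp
[hadoop@hadoop01 /app]$hdfs dfs -chmod g+w /user/hive/warehouse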
Initialize the metastore schema (embedded Derby by default):
[hadoop@hadoop01 /home/hadoop]$schematool -initSchema -dbType derby
.......
Metastore connection URL: jdbc:derby:;databaseName=metastore_db;create=true
Metastore Connection Driver : org.apache.derby.jdbc.EmbeddedDriver
Metastore connection User: APP
Starting metastore schema initialization to 2.3.0
Initialization script hive-schema-2.3.0.derby.sql
Initialization script completed
schemaTool completed
The embedded Derby metastore is created in whatever directory the command was run from, so metastore_db now sits under /home/hadoop (start hive from this same directory later, otherwise a new, empty metastore is created elsewhere):
[hadoop@hadoop01 /home/hadoop]$ll /home/hadoop/metastore_db/
[hadoop@hadoop01 /home/hadoop]$hdfs dfs -ls /tmp
Found 2 items
drwx-wx-wx   - hadoop supergroup          0 2018-11-26 19:12 /tmp/hive
Make sure HDFS and YARN are running:
[hadoop@hadoop01 /home/hadoop]$start-dfs.sh
[hadoop@hadoop01 /home/hadoop]$start-yarn.sh
[hadoop@hadoop01 /home/hadoop]$hive
...
hive>
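A minimal smoke test, run non-interactively with hive -e (the table name t1 is arbitrary, just to confirm that the metastore and the HDFS warehouse are wired together):
[hadoop@hadoop01 /home/hadoop]$hive -e 'create table t1(id int); show tables; drop table t1;'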
The embedded Derby metastore only supports a single active session, so for anything beyond local testing the metastore is usually moved to MySQL. Edit hive-site.xml and point the JDO connection properties at a MySQL instance:
[hadoop@hadoop01 /home/hadoop]$vim /app/hive/conf/hive-site.xml
<property>
  <name>javax.jdo.option.ConnectionURL</name>
  <value>jdbc:mysql://hadoop01:3306/hive</value>
</property>
<property>
  <name>javax.jdo.option.ConnectionDriverName</name>
  <value>com.mysql.jdbc.Driver</value>
</property>
<property>
  <name>javax.jdo.option.ConnectionUserName</name>
  <value>hive</value>
</property>
<property>
  <name>javax.jdo.option.ConnectionPassword</name>
  <value>123456</value>
</property>
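Before re-running schematool, the hive database and user have to exist in MySQL, and the JDBC driver has to be on Hive's classpath. A sketch, assuming MySQL is already running on hadoop01 with the credentials used in hive-site.xml above (on MySQL 8 the account would have to be created with CREATE USER instead of GRANT ... IDENTIFIED BY, and the connector jar version below is only an example):
[hadoop@hadoop01 /home/hadoop]$mysql -uroot -p -e "create database hive; grant all privileges on hive.* to 'hive'@'%' identified by '123456'; flush privileges;"
[hadoop@hadoop01 /home/hadoop]$cp mysql-connector-java-5.1.47.jar /app/hive/lib/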
[hadoop@hadoop01 /home/hadoop]$schematool -initSchema -dbType mysql
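If the initialization succeeds, the metastore tables (VERSION, DBS, TBLS, ...) should now exist in the hive database; a quick check:
[hadoop@hadoop01 /home/hadoop]$mysql -uhive -p123456 -e 'use hive; show tables;'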