操作用户:hadoop

操作目录:/home/hadoop/apps

操作机器:hadoop1


1. 下载

wget https://archive.apache.org/dist/hive/hive-3.1.2/apache-hive-3.1.2-bin.tar.gz

2. 解压、重命名

# 解压 (extract the local tarball downloaded in step 1 — tar takes a
# filename, not a URL)
tar -zxvf apache-hive-3.1.2-bin.tar.gz

# 重命名 (rename the extracted directory, not the tarball itself;
# extraction produces a directory named apache-hive-3.1.2-bin)
mv apache-hive-3.1.2-bin hive-3.1.2


3. 配置环境变量

vim /etc/profile
export HIVE_HOME=/home/hadoop/apps/hive-3.1.2
export PATH=$PATH:${HIVE_HOME}/bin

source /etc/profile


4. 修改配置文件

cd /home/hadoop/apps/hive-3.1.2/conf
cp hive-env.sh.template hive-env.sh
cp hive-default.xml.template hive-site.xml

# 修改hive-env.sh
vim hive-env.sh
export JAVA_HOME=/opt/jdk1.8.0_212
export HADOOP_HOME=/home/hadoop/apps/hadoop-3.1.3
export HIVE_CONF_DIR=/home/hadoop/apps/hive-3.1.2/conf
export HIVE_AUX_JARS_PATH=/home/hadoop/apps/hive-3.1.2/lib

# 修改hive-site.xml
vim hive-site.xml
<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
<property>
<name>javax.jdo.option.ConnectionURL</name>
<value>jdbc:mysql://hadoop3:3306/hive?useUnicode=true&amp;characterEncoding=utf8&amp;useSSL=false</value>
<description>JDBC connect string for a JDBC metastore</description>
</property>

<property>
<name>hive.metastore.schema.verification</name>
<value>false</value>
</property>
<property>
<name>javax.jdo.option.ConnectionDriverName</name>
<value>com.mysql.cj.jdbc.Driver</value>
<description>Driver class name for a JDBC metastore</description>
</property>

<property>
<name>javax.jdo.option.ConnectionUserName</name>
<value>root</value>
<description>username to use against metastore database</description>
</property>

<property>
<name>javax.jdo.option.ConnectionPassword</name>
<value>qwert123</value>
<description>password to use against metastore database</description>
</property>
<property>
<name>datanucleus.schema.autoCreateAll</name>
<value>true</value>
</property>
<!-- 指定存储元数据要连接的地址 -->
<property>
<name>hive.metastore.uris</name>
<value>thrift://hadoop1:9083</value>
</property>
<!-- 指定 hiveserver2 连接的 host -->
<property>
<name>hive.server2.thrift.bind.host</name>
<value>hadoop1</value>
</property>
<!-- 指定 hiveserver2 连接的端口号 -->
<property>
<name>hive.server2.thrift.port</name>
<value>10000</value>
</property>
<property>
<name>hive.server2.active.passive.ha.enable</name>
<value>true</value>
</property>
</configuration>


5. 初始化数据库

1. 在mysql中创建hive数据库,用于存储hive的表和元数据。
create database hive;

2. 下载mysql驱动包:mysql-connector-java-8.0.16.jar,将jar放到hive的lib目录下。

3. 初始化数据库命令:
schematool -initSchema -dbType mysql -verbose

4. 防止日志的jar冲突问题,在hive的lib目录下执行
mv log4j-slf4j-impl-2.10.0.jar log4j-slf4j-impl-2.10.0.jar.bak


6. 启动hive metastore

# 直接启动
hive --service metastore
# 后台启动
nohup hive --service metastore >/dev/null 2>&1 &


7. 启动hive

hive

Hive-3.1.2部署文档_hive


8. 启动hiveserver2

# 直接启动
hive --service hiveserver2
# 后台启动
nohup hive --service hiveserver2 >/dev/null 2>&1 &


9. beeline连接

beeline -u jdbc:hive2://hadoop1:10000/default -n hadoop

Hive-3.1.2部署文档_hadoop_02