# 解压安装
# Unpack the Hive 3.1.2 distribution into /opt/module
tar -zxv -f apache-hive-3.1.2-bin.tar.gz -C /opt/module/
# 配置环境变量
# Append Hive environment variables to /etc/profile.
# Guarded so re-running this setup does not append duplicate entries
# (the original unconditional `>>` duplicates lines on every run).
if ! grep -q 'HIVE_HOME=/opt/module/apache-hive-3.1.2-bin' /etc/profile; then
  echo 'export HIVE_HOME=/opt/module/apache-hive-3.1.2-bin' >> /etc/profile
  echo 'export PATH=$PATH:$HIVE_HOME/bin' >> /etc/profile
fi
# Reload the environment variables into the current shell
source /etc/profile
# 初始化derby元数据
# Initialize the Derby metastore schema.
# Use an absolute path: the original relative "bin/schematool" only works
# when the current directory is $HIVE_HOME (the pasted session below had to
# invoke it by full path for exactly this reason).
"$HIVE_HOME/bin/schematool" -dbType derby -initSchema
#报错
[root@node01 software]# /opt/module/apache-hive-3.1.2-bin/bin/schematool -dbType derby -initSchema
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/opt/module/apache-hive-3.1.2-bin/lib/log4j-slf4j-impl-2.10.0.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/opt/module/hadoop-3.1.4/share/hadoop/common/lib/slf4j-log4j12-1.7.25.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.apache.logging.slf4j.Log4jLoggerFactory]
Exception in thread "main" java.lang.NoSuchMethodError: com.google.common.base.Preconditions.checkArgument(ZLjava/lang/String;Ljava/lang/Object;)V
# 解决jar包冲突问题
# Resolve jar conflicts between Hive 3.1.2 and Hadoop 3.1.4:
# - park Hive's bundled log4j-slf4j binding so only Hadoop's SLF4J binding
#   stays on the classpath (fixes the "multiple SLF4J bindings" warning above)
# - replace Hive's guava-19.0 with Hadoop's guava-27.0-jre (fixes the
#   NoSuchMethodError on Preconditions.checkArgument shown above)
mv $HIVE_HOME/lib/log4j-slf4j-impl-2.10.0.jar $HIVE_HOME/lib/log4j-slf4j-impl-2.10.0.jar.bak
cp $HADOOP_HOME/share/hadoop/common/lib/guava-27.0-jre.jar $HIVE_HOME/lib/
mv $HIVE_HOME/lib/guava-19.0.jar $HIVE_HOME/lib/guava-19.0.jar.bak
# 启动Hive
# Start ZooKeeper and the Hadoop HA cluster first — Hive needs HDFS/YARN up.
# Then launch the Hive CLI:
hive
# MySQL存储元数据
# Uninstall the stock MariaDB packages, which conflict with the MySQL RPMs.
# List what is installed:
rpm -qa | grep mariadb
# Remove each package reported above (--nodeps skips dependency checks), e.g.:
# (the original had a literal placeholder line `rpm -e --nodeps 文件名`,
# which would run as a broken command — it is a comment now)
rpm -e --nodeps mariadb-libs-5.5.68-1.el7.x86_64
# Unpack the MySQL 5.7 RPM bundle
tar xvf mysql-5.7.32-1.el7.x86_64.rpm-bundle.tar
# Install in dependency order: common -> libs -> libs-compat -> devel -> client -> server
rpm -ivh mysql-community-common-5.7.32-1.el7.x86_64.rpm
rpm -ivh mysql-community-libs-5.7.32-1.el7.x86_64.rpm
rpm -ivh mysql-community-libs-compat-5.7.32-1.el7.x86_64.rpm
rpm -ivh mysql-community-devel-5.7.32-1.el7.x86_64.rpm
rpm -ivh mysql-community-client-5.7.32-1.el7.x86_64.rpm
rpm -ivh mysql-community-server-5.7.32-1.el7.x86_64.rpm
# Start mysqld now and enable it at boot
systemctl start mysqld
systemctl enable mysqld
# Fetch the temporary root password generated on first start
grep 'temporary password' /var/log/mysqld.log
# Log in with it ('临时密码' is a placeholder — paste the password printed above)
mysql -uroot -p'临时密码'
# ---- the statements below run inside the mysql client, not the shell ----
# Relax the password-validation policy (MySQL 5.7 validate_password variables)
set global validate_password_length=4;
set global validate_password_policy=0;
# Set a new root password
set password=password('000000');
# 修改mysql root用户的登录权限
# (inside the mysql client) Allow root to log in from any host, not just localhost
update mysql.user set Host='%' where HOST='localhost' and User='root';
# Create the Hive metastore database
CREATE DATABASE hive DEFAULT CHARACTER SET utf8 DEFAULT COLLATE utf8_general_ci;
# Grant user 'hive' full rights on the metastore DB
# (in MySQL 5.7, GRANT ... IDENTIFIED BY also creates the account if missing)
grant all on hive.* to 'hive'@'%' identified by '000000';
# Apply the privilege changes
flush privileges;
# Upload the MySQL JDBC driver to node01, then copy it into Hive's lib directory
cp mysql-connector-java-5.1.47.jar /opt/module/apache-hive-3.1.2-bin/lib
# 配置 hive-site.xml(位于 $HIVE_HOME/conf/;原文误写为 hive-core.xml)
<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<!-- Belongs in $HIVE_HOME/conf/hive-site.xml -->
<configuration>
<!-- JDBC connection URL for the metastore database -->
<property>
<name>javax.jdo.option.ConnectionURL</name>
<value>jdbc:mysql://node01:3306/hive?useSSL=false</value>
</property>
<!-- JDBC driver class (matches the mysql-connector-java 5.x jar copied above) -->
<property>
<name>javax.jdo.option.ConnectionDriverName</name>
<value>com.mysql.jdbc.Driver</value>
</property>
<!-- JDBC username -->
<property>
<name>javax.jdo.option.ConnectionUserName</name>
<value>hive</value>
</property>
<!-- JDBC password -->
<property>
<name>javax.jdo.option.ConnectionPassword</name>
<value>000000</value>
</property>
<!-- Hive's default warehouse directory on HDFS -->
<property>
<name>hive.metastore.warehouse.dir</name>
<value>/user/hive/warehouse</value>
</property>
<!-- Port hiveserver2 listens on -->
<property>
<name>hive.server2.thrift.port</name>
<value>10000</value>
</property>
<!-- Host hiveserver2 binds to -->
<property>
<name>hive.server2.thrift.bind.host</name>
<value>node01</value>
</property>
<!-- Thrift URI of the metastore service; with this set, the standalone
     metastore service must be started before clients can connect -->
<property>
<name>hive.metastore.uris</name>
<value>thrift://node01:9083</value>
</property>
<!-- Disable auth on the metastore event DB notification API -->
<property>
<name>hive.metastore.event.db.notification.api.auth</name>
<value>false</value>
</property>
<!-- Skip metastore schema version verification -->
<property>
<name>hive.metastore.schema.verification</name>
<value>false</value>
</property>
<!-- hiveserver2 active/passive HA; enabling it speeds up hiveserver2 startup -->
<property>
<name>hive.server2.active.passive.ha.enable</name>
<value>true</value>
</property>
<!-- Hive CLI: print column headers and the current database in the prompt -->
<property>
<name>hive.cli.print.header</name>
<value>true</value>
<description>Whether to print the names of the columns in query output.</description>
</property>
<property>
<name>hive.cli.print.current.db</name>
<value>true</value>
<description>Whether to include the current database in the Hive prompt.</description>
</property>
</configuration>
# 初始化MySQL元数据库
# Initialize the metastore schema in MySQL (run once, after hive-site.xml points at MySQL)
schematool -initSchema -dbType mysql -verbose
# jdbc方式连接Hive
# Start the metastore service (required because hive.metastore.uris is set).
# NOTE: this blocks the terminal — run it in its own shell, or background it:
#   nohup hive --service metastore >/tmp/metastore.log 2>&1 &
hive --service metastore
# Start hiveserver2 (serves JDBC on the thrift port 10000 configured above);
# it also runs in the foreground
hive --service hiveserver2
# Connect over JDBC with beeline as user root
beeline -u jdbc:hive2://node01:10000 -n root
#报错
[root@node01 conf]# beeline -u jdbc:hive2://node01:10000 -n root
Connecting to jdbc:hive2://node01:10000
21/07/27 20:57:30 [main]: WARN jdbc.HiveConnection: Failed to connect to node01:10000
Error: Could not open client transport with JDBC Uri: jdbc:hive2://node01:10000: Failed to open new session: java.lang.RuntimeException: org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.security.authorize.AuthorizationException): User: root is not allowed to impersonate root (state=08S01,code=0)
Beeline version 3.1.2 by Apache Hive
# Fix the "root is not allowed to impersonate root" error above: declare root
# as a Hadoop proxy user in core-site.xml (copy to every node, then restart)
<property>
<name>hadoop.proxyuser.root.hosts</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.root.groups</name>
<value>*</value>
</property>
# 再将RunJar进程(metastore / hiveserver2)杀死,重启集群就解决问题了
# Kill the stale Hive RunJar processes (metastore / hiveserver2) before
# restarting. Find them with: jps | grep RunJar
# The original `kill -9 pid` was a literal placeholder and reached for SIGKILL
# first; send the default SIGTERM and only escalate to -9 if a process hangs.
pgrep -f RunJar | xargs -r kill
# 参考 https://blog.csdn.net/qq_52241909/article/details/111884774