hive3.1 op&dev

版本:
hadoop-3.3.0.tar.gz
apache-hive-3.1.2-bin.tar.gz
zookeeper-3.4.9.tar.gz
spark-2.3.2-bin-without-hadoop.tgz

配置

环境变量

1
2
3
4
5
6
7
8
9
10
11
export MYSQL_HOME=/usr/local/mysql
export PATH=$PATH:$MYSQL_HOME/bin
export JAVA_HOME=/usr/java/jdk1.8.0_131
export JRE_HOME=$JAVA_HOME/jre
export CLASSPATH=$JAVA_HOME/lib:$JRE_HOME/lib:$CLASSPATH
export PATH=$PATH:$JAVA_HOME/bin:$JRE_HOME/bin
export HADOOP_HOME=/home/software/hadoop-3.3.0
export PATH=$PATH:$HADOOP_HOME/bin
export PATH=$PATH:$HADOOP_HOME/sbin
export HIVE_HOME=/home/software/apache-hive-3.1.2-bin
export PATH=$PATH:$HIVE_HOME/bin

cp hive-site.xml模板

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
<configuration>
<property>
<name>javax.jdo.option.ConnectionURL</name>
<value>jdbc:mysql://sdw34:3306/hive?createDatabaseIfNotExist=true&amp;useSSL=false</value>
</property>
<property>
<name>javax.jdo.option.ConnectionDriverName</name>
<value>com.mysql.jdbc.Driver</value>
</property>
<property>
<name>javax.jdo.option.ConnectionUserName</name>
<value>dev</value>
</property>
<property>
<name>javax.jdo.option.ConnectionPassword</name>
<value>dev</value>
</property>
<property>
<name>hive.cluster.delegation.token.store.zookeeper.connectString</name>
<value>sdw34</value>
</property>
<property>
<name>hive.zookeeper.quorum</name>
<value>sdw34:2181</value>
</property>
<property>
<name>hive.metastore.uris</name>
<value>thrift://192.168.1.34:9083</value>
</property>
<property>
<name>hive.execution.engine</name>
<value>mr</value>
</property>
<property>
<name>hive.metastore.event.db.notification.api.auth</name>
<value>false</value>
</property>

<property>
<name>hive.server2.authentication</name>
<value>NONE</value>
</property>

<property>
<name>hive.server2.thrift.client.user</name>
<value>root</value>
<description>Username to use against thrift client</description>
</property>
<property>
<name>hive.server2.thrift.client.password</name>
<value>root</value>
<description>Password to use against thrift client</description>
</property>
<property>
<name>hive.server2.enable.doAs</name>
<value>true</value>
</property>
<property>
<name>hive.server2.allow.user.substitution</name>
<value>true</value>
</property>
<property>
<name>datanucleus.schema.autoCreateAll</name>
<value>true</value>
</property>
<property>
<name>hive.metastore.schema.verification</name>
<value>false</value>
<description>
Enforce metastore schema version consistency.
True: Verify that version information stored in is compatible with one from Hive jars. Also disable automatic
schema migration attempt. Users are required to manually migrate schema after Hive upgrade which ensures
proper metastore schema migration. (Default)
False: Warn if the version information stored in metastore doesn't match with one from in Hive jars.
</description>
</property>
<!--<property>
<name>hive.server2.thrift.bind.host</name>
<value>sdw34</value>
</property>-->
</configuration>

cp hive-log4j2.properties模板

1
2
# 改个路径
property.hive.log.dir = /home/software/apache-hive-3.1.2-bin/logs

cp hive-env.sh模板

1
2
3
# 加上环境变量
export HADOOP_HOME=/home/software/hadoop-3.3.0
export JAVA_HOME=/usr/java/jdk1.8.0_131

mysql-connector的jar包放到lib下
将hadoop/share/下的guava-27移到hive/lib下并删除hive的低版本guava
cp core-site.xml文件到hive/conf下

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
<?xml version="1.0" encoding="UTF-8"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<!--
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. See accompanying LICENSE file.
-->

<!-- Put site-specific property overrides in this file. -->

<configuration>
<property>
<name>fs.defaultFS</name>
<!-- xwzx.com为当前机器名或者ip号 -->
<value>hdfs://sdw34:9000</value>
</property>
<property>
<name>hadoop.tmp.dir</name>
<!-- 以下为存放临时文件的路径 -->
<value>/home/software/hadoop-3.3.0/data/tmp</value>
</property>
<property>
<name>hadoop.proxyuser.dev.hosts</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.dev.groups</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.root.hosts</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.root.groups</name>
<value>*</value>
</property>
<!--<property>
<name>hadoop.proxyuser.hive.hosts</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.hive.groups</name>
<value>*</value>
</property>-->
<property>
<name>hadoop.proxyuser.hadoop.hosts</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.hadoop.groups</name>
<value>*</value>
</property>
<property>
<name>io.compression.codecs</name>
<value>
org.apache.hadoop.io.compress.DefaultCodec,
org.apache.hadoop.io.compress.GzipCodec,
org.apache.hadoop.io.compress.BZip2Codec,
org.apache.hadoop.io.compress.Lz4Codec,
org.apache.hadoop.io.compress.SnappyCodec
</value>
<description>A comma-separated list of the compression codec classes that can
be used for compression/decompression. In addition to any classes specified
with this property (which take precedence), codec classes on the classpath
are discovered using a Java ServiceLoader.</description>
</property>
</configuration>
启动
1
2
3
4
5
6
7
# 初始化
schematool -dbType mysql -initSchema
# 启动metastore
hive --service metastore &
# 启动hiveserver2 hiveserver2的服务端口默认是10000,WebUI端口默认是10002
hive --service hiveserver2 &
# 或者 cd hiveserver2 &
hql
1
2
3
4
5
6
# 删除hive内部表,不会删除数据
# 将内部表改成外部表
alter table table_name set TBLPROPERTIES('EXTERNAL'='TRUE');

# 改名
ALTER TABLE name RENAME TO new_name
文档

https://www.docs4dev.com/docs/zh/apache-hive/3.1.1/reference/Configuration_Properties.html

文章作者: CYBSKY
文章链接: https://cybsky.top/2022/09/07/cyb-mds/bigdata/Hive/hive3.1 op&dev/
版权声明: 本博客所有文章除特别声明外,均采用 CC BY-NC-SA 4.0 许可协议。转载请注明来自 CYBSKY