Building Hive on Top of Hadoop, ZooKeeper, HBase, and Spark

Download and unpack Sqoop 1.4.7, Hive 3.1.2, and Accumulo 2.1.0 as root, then move everything into /usr/local:

su -
wget https://archive.apache.org/dist/sqoop/1.4.7/sqoop-1.4.7.bin__hadoop-2.6.0.tar.gz
wget https://archive.apache.org/dist/hive/hive-3.1.2/apache-hive-3.1.2-bin.tar.gz
wget https://archive.apache.org/dist/accumulo/2.1.0/accumulo-2.1.0-bin.tar.gz
wget https://repo.maven.apache.org/maven2/com/fasterxml/woodstox/woodstox-core/5.0.3/woodstox-core-5.0.3.jar
wget https://repo.maven.apache.org/maven2/org/codehaus/woodstox/stax2-api/4.1.1/stax2-api-4.1.1.jar
tar -xvzf accumulo-2.1.0-bin.tar.gz
tar -xvzf sqoop-1.4.7.bin__hadoop-2.6.0.tar.gz
tar -xzvf apache-hive-3.1.2-bin.tar.gz
mv accumulo-2.1.0 /usr/local/accumulo
mv sqoop-1.4.7.bin__hadoop-2.6.0 /usr/local/sqoop
mv apache-hive-3.1.2-bin /usr/local/hive
mv woodstox-core-5.0.3.jar /usr/local/accumulo/lib/
mv stax2-api-4.1.1.jar /usr/local/accumulo/lib/

cp /usr/local/hadoop/share/hadoop/common/*.jar /usr/local/sqoop/lib/
cp /usr/local/hadoop/share/hadoop/mapreduce/*.jar /usr/local/sqoop/lib/
cp /usr/local/hadoop/share/hadoop/hdfs/*.jar /usr/local/sqoop/lib/

nano /etc/profile   # then append the following lines

export SQOOP_HOME=/usr/local/sqoop 
export PATH=$SQOOP_HOME/bin:$PATH
export HADOOP_MAPRED_HOME=/usr/local/hadoop/share/hadoop/mapreduce
export HIVE_HOME=/usr/local/hive
export PATH=$HIVE_HOME/bin:$PATH
export HCAT_HOME=$HIVE_HOME/hcatalog
export HADOOP_CONF_DIR=$HADOOP_HOME/etc/hadoop
export YARN_HOME=$HADOOP_HOME
export ACCUMULO_HOME=/usr/local/accumulo
export PATH=$PATH:$ACCUMULO_HOME/bin

After saving and exiting, reload the environment variables:

source /etc/profile
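To confirm the new variables are in effect, a quick optional check (not part of the original steps) is to echo a few of them:

echo $SQOOP_HOME      # expected: /usr/local/sqoop
echo $HIVE_HOME       # expected: /usr/local/hive
echo $ACCUMULO_HOME   # expected: /usr/local/accumulo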

nano /usr/local/sqoop/conf/sqoop-env.sh   # add the following two lines

export HCAT_HOME=/usr/local/hive/hcatalog
export HIVE_HOME=/usr/local/hive
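If Sqoop later complains that it cannot locate Hadoop, the same file also accepts Hadoop home paths; a minimal sketch, assuming the /usr/local/hadoop layout used elsewhere in this guide:

export HADOOP_COMMON_HOME=/usr/local/hadoop
export HADOOP_MAPRED_HOME=/usr/local/hadoop/share/hadoop/mapreduce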

wget https://dev.mysql.com/get/Downloads/Connector-J/mysql-connector-java-8.0.26.tar.gz
tar -xvzf mysql-connector-java-8.0.26.tar.gz
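The extracted archive contains the driver jar itself; copying it into Sqoop's lib directory is assumed here so that Sqoop can reach MySQL (the original only copies a driver into Hive further down):

cp mysql-connector-java-8.0.26/mysql-connector-java-8.0.26.jar /usr/local/sqoop/lib/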

nano /usr/local/hive/conf/hive-env.sh   # add the following two lines

export HADOOP_HOME=/usr/local/hadoop
export HADOOP_CLASSPATH=$HADOOP_HOME/share/hadoop/mapreduce/hadoop-mapreduce-client-core.jar:$HADOOP_HOME/share/hadoop/common/*:$HADOOP_HOME/share/hadoop/hdfs/*:$HADOOP_HOME/share/hadoop/mapreduce/*:$HADOOP_HOME/share/hadoop/yarn/*
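If /usr/local/hive/conf/hive-env.sh does not exist yet, it can first be created from the template that ships with Hive (an assumed step; the original goes straight to editing):

cp /usr/local/hive/conf/hive-env.sh.template /usr/local/hive/conf/hive-env.sh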

# Replace Hive's bundled Guava with the newer one from Hadoop, and remove duplicate SLF4J binding jars
mv /usr/local/hive/lib/guava-19.0.jar /tmp/
cp /usr/local/hadoop/share/hadoop/common/lib/guava-27.0-jre.jar /usr/local/hive/lib/
rm /usr/local/hbase/lib/client-facing-thirdparty/log4j-slf4j-impl-2.17.2.jar
rm /usr/local/hive/lib/log4j-slf4j-impl-2.10.0.jar

cd /usr/lib/jvm/
apt-get update
apt-get install openjdk-11-jdk --fix-missing

cd /usr/local/accumulo
nano /usr/local/accumulo/conf/accumulo-env.sh
Change the Hadoop and ZooKeeper variables back to the paths we installed them under, and point JAVA_HOME at the JDK 11 we just installed:

export JAVA_HOME=/usr/lib/jvm/java-11-openjdk-amd64
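For reference, the Hadoop and ZooKeeper lines in accumulo-env.sh would end up looking roughly like this, assuming the /usr/local install layout used throughout this guide (the ZooKeeper path in particular is an assumption):

export HADOOP_HOME=/usr/local/hadoop
export HADOOP_CONF_DIR=/usr/local/hadoop/etc/hadoop
export ZOOKEEPER_HOME=/usr/local/zookeeper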

cp /usr/local/hive/conf/hive-default.xml.template /usr/local/hive/conf/hive-site.xml
nano /usr/local/hive/conf/hive-site.xml

Set or adjust the following properties; they all belong inside the single <configuration> root of hive-site.xml:

<configuration>
    <property>
        <name>hive.execution.engine</name>
        <value>spark</value>
        <description>Set Hive's execution engine to Spark</description>
    </property>
    <property>
        <name>hive.exec.scratchdir</name>
        <value>/tmp/${system:user.name}</value>
    </property>
    <property>
        <name>hive.querylog.location</name>
        <value>${system:java.io.tmpdir}/${system:user.name}</value>
    </property>
    <property>
        <name>javax.jdo.option.ConnectionURL</name>
        <value>jdbc:mysql://localhost:3306/metastore_db?useSSL=false</value>
    </property>
    <property>
        <name>javax.jdo.option.ConnectionUserName</name>
        <value>hiveuser</value>
    </property>
    <property>
        <name>javax.jdo.option.ConnectionPassword</name>
        <value>hivepassword</value>
    </property>
    <property>
        <name>javax.jdo.option.ConnectionDriverName</name>
        <value>com.mysql.cj.jdbc.Driver</value>
    </property>
    <property>
        <name>hive.metastore.uris</name>
        <value>thrift://localhost:9083</value>
    </property>
    <property>
        <name>hive.metastore.warehouse.dir</name>
        <value>/user/hive/warehouse</value>
    </property>
    <!-- other configuration items -->
</configuration>
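A quick well-formedness check of the edited file can catch copy/paste mistakes; this is an optional step not in the original, and it assumes xmllint (from the libxml2-utils package) is available:

xmllint --noout /usr/local/hive/conf/hive-site.xml   # prints nothing when the XML is well-formed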

Go to https://nowjava.com/jar/detail/m03077482/mysql-connector-java-8.0.19.jar.html and download the jar from there.
cp /usr/local/mysql-connector-java-8.0.19.jar $HIVE_HOME/lib/

mysql -u root -p

Create the metastore_db database.
Use ALTER USER 'hiveuser'@'localhost' IDENTIFIED BY '123456'; to change hiveuser's password.
Use GRANT ALL PRIVILEGES ON metastore_db.* TO 'hiveuser'@'localhost'; to grant hiveuser all privileges on metastore_db.
Flush the privileges so the changes take effect (note that this password must match javax.jdo.option.ConnectionPassword in hive-site.xml).

CREATE DATABASE metastore_db;
CREATE USER 'hiveuser'@'%' IDENTIFIED BY '123456';
GRANT ALL PRIVILEGES ON metastore_db.* TO 'hiveuser'@'%';
FLUSH PRIVILEGES;
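To confirm the account works before wiring it into Hive (an optional check not in the original steps):

mysql -u hiveuser -p123456 -e "SHOW DATABASES;"   # metastore_db should appear in the output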

dpkg -l | grep mysql
apt update
apt install mysql-server
mkdir -p /var/lib/mysql
usermod -d /var/lib/mysql mysql
chown mysql:mysql /var/lib/mysql
service mysql start
schematool -initSchema -dbType mysql   # initialize the metastore schema first
hive --service metastore &             # then start the metastore service
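A way to double-check that the schema initialization succeeded is to ask schematool for the schema version it finds in MySQL:

schematool -info -dbType mysql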

sed -i 's|\${system:user.name}|hive|g' /usr/local/hive/conf/hive-site.xml
sed -i 's|\${system:java.io.tmpdir}|/tmp|g' /usr/local/hive/conf/hive-site.xml
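An optional check that the substitutions landed:

grep -n 'system:' /usr/local/hive/conf/hive-site.xml   # ideally prints nothing; any remaining ${system:...} placeholders would still need replacing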

nano /usr/local/sqoop/conf/sqoop-env.sh

export HADOOP_ORG_APACHE_SQOOP_SQOOP_USER=$USER
export HADOOP_ORG_APACHE_SQOOP_SQOOP_OPTS=""

sqoop version   # verify the Sqoop installation
hive            # launch the Hive CLI

SHOW DATABASES;
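As a final end-to-end check, Sqoop can be pointed at the same MySQL instance; a sketch assuming the hiveuser/123456 account created above and the MySQL connector jar present in /usr/local/sqoop/lib/:

sqoop list-databases \
    --connect jdbc:mysql://localhost:3306/ \
    --username hiveuser \
    --password 123456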