栏目分类:
子分类:
返回
名师互学网用户登录
快速导航关闭
当前搜索
当前分类
子分类
实用工具
热门搜索
名师互学网 > IT > 前沿技术 > 大数据 > 大数据系统

基于Java开发Hive篇

基于Java开发Hive篇

package com.hj.hive;

import java.io.File;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;

public class JDBC {

        static Configuration conf = null;

        public void init() {

                String proDir = System.getProperty("user.dir");

                System.out.println(proDir);

                String baseDir = proDir + File.separator + "conf";

                // 获取core-site.xml文件

                String corePath = baseDir + File.separator + "core-site.xml";

                // 获取hdfs-site.xml文件

                String hdfsPath = baseDir + File.separator + "hdfs-site.xml";

                conf = new Configuration();

                conf.addResource(new Path(corePath));

                conf.addResource(new Path(hdfsPath));

                conf.set("fs.defaultFS", "hdfs://hadoop:9000");

}

public static void main(String[] args) throws ClassNotFoundException, SQLException {

        // TODO Auto-generated method stub

        JDBC jdbc = new JDBC();

        jdbc.init();

        // 创建HQL语句

        String[] sqls = {"CREATE TABLE IF NOT EXISTS stu (id INT,name STRING)",

              "Select COUNT(*) FROM stu",

              "DROP TABLE stu"};

        // 创建JDBC URL

        String url = "jdbc:hive2://hadoop:10000";

        // 创建hive驱动

        String driverName = "org.apache.hive.jdbc.HiveDriver";

        Class.forName(driverName);

        // 创建连接

        Connection connection = null;

        try {

                // 获取JDBC链接

                connection = DriverManager.getConnection(url, "root", "123456");

                exeDDL(connection, sqls[0]);

                System.out.println("success!");

                // 查询

                exeDML(connection, sqls[1]);

                // 删除

                exeDDL(connection, sqls[2]);

                System.out.println("delete success!");

        } catch (Exception e) {

                // TODO: handle exception

                System.out.println("failed: "+e.getMessage());//获取异常信息

        } finally {

                if (connection != null){

                connection.close(); // 关闭connection连接

        }

        }

}

private static void exeDML(Connection connection, String sql) throws SQLException {

        // 查询完之后返回一个对象ResultSet

        PreparedStatement preparedStatement = null;

        ResultSet resultSet = null;

        ResultSetmetaData resultSetmetaData = null;

        try {

                // 执行SQL

                preparedStatement = connection.prepareStatement(sql);

                resultSet = preparedStatement.executeQuery(); //resultSet接收执行语句返回的结果

                // 输出结果到控制台

                resultSetmetaData = resultSet.getmetaData();

                int column = resultSetmetaData.getColumnCount();

                for (int i = 1; i <= column; i++){

                        System.out.println(resultSetmetaData.getColumnLabel(i) + "t");

                }

         } catch (Exception e) {

                // TODO: handle exception

        } finally {

                if ( resultSet != null){

                resultSet.close();

                }

                if (preparedStatement != null){

                        preparedStatement.close();

                }

        }

}

private static void exeDDL(Connection connection, String sql) throws SQLException{

        //preparestatment JDBC存储过程(批量处理)

        // statment 一次处理

        PreparedStatement preparedStatement = null;

        try {

                preparedStatement = connection.prepareStatement(sql);

                // 执行语句

                preparedStatement.execute();

        } catch (Exception e) {

                // TODO: handle exception

        }

        finally {

                if(preparedStatement != null){

                preparedStatement.close();

        }

        }

        }

}

1、创建stu表

package com.hj.hive;

import org.apache.hadoop.hive.ql.exec.UDF;

import org.apache.hadoop.io.Text;

public class Udf extends UDF{

        // 将大写字母转化成小写字母

        public Text evaluate(final Text s){

        if (s == null){

                return null;

        }

        return new Text(s.toString().toUpperCase());

   }

}

转载请注明:文章转载自 www.mshxw.com
本文地址:https://www.mshxw.com/it/423037.html
我们一直用心在做
关于我们 文章归档 网站地图 联系我们

版权所有 (c)2021-2022 MSHXW.COM

ICP备案号:晋ICP备2021003244-6号