package com.hj.hive;
import java.io.File;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
/**
 * Minimal HiveServer2 JDBC demo: creates the {@code stu} table, runs a
 * COUNT(*) query, prints the result, then drops the table.
 */
public class JDBC {
    // Shared Hadoop configuration, populated by init().
    static Configuration conf = null;

    /**
     * Loads core-site.xml and hdfs-site.xml from {@code <working-dir>/conf}
     * into the shared Hadoop configuration and sets the default filesystem.
     */
    public void init() {
        String proDir = System.getProperty("user.dir");
        System.out.println(proDir);
        String baseDir = proDir + File.separator + "conf";
        // Locate the core-site.xml file
        String corePath = baseDir + File.separator + "core-site.xml";
        // Locate the hdfs-site.xml file
        String hdfsPath = baseDir + File.separator + "hdfs-site.xml";
        conf = new Configuration();
        conf.addResource(new Path(corePath));
        conf.addResource(new Path(hdfsPath));
        conf.set("fs.defaultFS", "hdfs://hadoop:9000");
    }

    public static void main(String[] args) throws ClassNotFoundException, SQLException {
        JDBC jdbc = new JDBC();
        jdbc.init();
        // HQL statements: create table, count rows, drop table.
        String[] sqls = {"CREATE TABLE IF NOT EXISTS stu (id INT,name STRING)",
                "Select COUNT(*) FROM stu",
                "DROP TABLE stu"};
        // JDBC URL of the HiveServer2 instance
        String url = "jdbc:hive2://hadoop:10000";
        // Register the Hive JDBC driver
        String driverName = "org.apache.hive.jdbc.HiveDriver";
        Class.forName(driverName);
        // try-with-resources guarantees the connection is closed on every path
        try (Connection connection = DriverManager.getConnection(url, "root", "123456")) {
            exeDDL(connection, sqls[0]);
            System.out.println("success!");
            // Query the row count
            exeDML(connection, sqls[1]);
            // Drop the table
            exeDDL(connection, sqls[2]);
            System.out.println("delete success!");
        } catch (Exception e) {
            System.out.println("failed: "+e.getMessage());//report the failure
            // Keep the full stack trace; the message alone loses the cause chain.
            e.printStackTrace();
        }
    }

    /**
     * Executes a query and prints the column labels followed by every row,
     * tab-separated, to stdout.
     *
     * @param connection open JDBC connection
     * @param sql        SELECT statement to run
     * @throws SQLException if the query or result processing fails;
     *                      propagated instead of being silently swallowed
     */
    private static void exeDML(Connection connection, String sql) throws SQLException {
        // try-with-resources closes ResultSet and PreparedStatement in order
        try (PreparedStatement preparedStatement = connection.prepareStatement(sql);
             ResultSet resultSet = preparedStatement.executeQuery()) {
            ResultSetMetaData resultSetMetaData = resultSet.getMetaData();
            int column = resultSetMetaData.getColumnCount();
            // Header line: column labels separated by tabs
            // (original printed a literal "t" instead of "\t").
            StringBuilder header = new StringBuilder();
            for (int i = 1; i <= column; i++) {
                header.append(resultSetMetaData.getColumnLabel(i)).append('\t');
            }
            System.out.println(header);
            // The original never iterated the rows, so the query result was
            // never printed; emit each row tab-separated.
            while (resultSet.next()) {
                StringBuilder row = new StringBuilder();
                for (int i = 1; i <= column; i++) {
                    row.append(resultSet.getString(i)).append('\t');
                }
                System.out.println(row);
            }
        }
    }

    /**
     * Executes a DDL statement (CREATE/DROP).
     *
     * @param connection open JDBC connection
     * @param sql        DDL statement to run
     * @throws SQLException if execution fails; propagated to the caller
     *                      instead of being silently swallowed
     */
    private static void exeDDL(Connection connection, String sql) throws SQLException {
        // PreparedStatement precompiles the statement; fine for one-off DDL too
        try (PreparedStatement preparedStatement = connection.prepareStatement(sql)) {
            preparedStatement.execute();
        }
    }
}
1. Create the stu table
package com.hj.hive;
import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.io.Text;
public class Udf extends UDF{
// Converts the input text to upper case.
// NOTE(review): the original comment claimed upper-to-lower-case conversion,
// but the code calls toUpperCase(); the comment is corrected to match the
// actual behavior — confirm which direction was intended.
public Text evaluate(final Text s){
// Null-safe: Hive passes NULL column values as null
if (s == null){
return null;
}
return new Text(s.toString().toUpperCase());
}
}



