基于hadoop的Hive數(shù)據(jù)倉庫JavaAPI簡單調(diào)用的實例,關于Hive的簡介在此不贅述。Hive提供了三種用戶接口:CLI、JDBC/ODBC 和 WebGUI。
CLI,即Shell命令行
JDBC/ODBC 是 Hive 的 Java 客戶端接口,與使用傳統(tǒng)數(shù)據(jù)庫 JDBC 的方式類似
WebGUI是通過瀏覽器訪問 Hive
本文主要介紹的就是第二種用戶接口,直接進入正題。
1、Hive 安裝:
1)hive的安裝請參考網(wǎng)上的相關文章,測試時只在hadoop一個節(jié)點上安裝hive即可。
2)測試數(shù)據(jù)data文件'\t'分隔:
1 zhangsan
2 lisi
3 wangwu
3)將測試數(shù)據(jù)data上傳到linux目錄下,我放置在:/home/hadoop01/data
2、在使用 JDBC 開發(fā) Hive 程序時, 必須首先開啟 Hive 的遠程服務接口。使用下面命令進行開啟:
Java代碼
hive --service hiveserver >/dev/null 2>/dev/null &
3、測試代碼:
Java代碼
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import org.apache.log4j.Logger;
/**
* Hive的JavaApi
*
* 啟動hive的遠程服務接口命令行執(zhí)行:hive --service hiveserver >/dev/null 2>/dev/null &
*
* @author 吖大哥
*
*/
/**
 * Minimal Hive JDBC demo client.
 *
 * <p>Connects to a (pre-HiveServer2) Hive thrift server, then: drops the demo
 * table if present, recreates it, shows/describes it, loads a local data file,
 * selects its rows, and runs a count query.
 *
 * <p>Start the remote Hive service first:
 * {@code hive --service hiveserver >/dev/null 2>/dev/null &}
 *
 * @author 吖大哥
 */
public class HiveJdbcCli {
    // Old thrift-based Hive driver; HiveServer2 would use
    // org.apache.hive.jdbc.HiveDriver and jdbc:hive2:// instead.
    private static final String driverName = "org.apache.hadoop.hive.jdbc.HiveDriver";
    private static final String url = "jdbc:hive://hadoop3:10000/default";
    private static final String user = "hive";
    private static final String password = "mysql";
    private static final Logger log = Logger.getLogger(HiveJdbcCli.class);

    public static void main(String[] args) {
        Connection conn = null;
        Statement stmt = null;
        try {
            conn = getConn();
            stmt = conn.createStatement();
            // Step 1: drop the table if it already exists.
            String tableName = dropTable(stmt);
            // Step 2: create it fresh.
            createTable(stmt, tableName);
            // Step 3: verify via "show tables".
            showTables(stmt, tableName);
            // describe table
            describeTables(stmt, tableName);
            // load data into table
            loadData(stmt, tableName);
            // select * query
            selectData(stmt, tableName);
            // regular hive query (aggregation)
            countData(stmt, tableName);
        } catch (ClassNotFoundException e) {
            log.error(driverName + " not found!", e);
            System.exit(1);
        } catch (SQLException e) {
            log.error("Connection error!", e);
            System.exit(1);
        } finally {
            // Close in reverse order of acquisition: statement first, then
            // connection (the original closed the connection first, which can
            // invalidate the still-open statement).
            if (stmt != null) {
                try {
                    stmt.close();
                } catch (SQLException e) {
                    log.error("Failed to close statement", e);
                }
            }
            if (conn != null) {
                try {
                    conn.close();
                } catch (SQLException e) {
                    log.error("Failed to close connection", e);
                }
            }
        }
    }

    /** Runs {@code select count(1)} on the table and prints the result. */
    private static void countData(Statement stmt, String tableName)
            throws SQLException {
        String sql = "select count(1) from " + tableName;
        System.out.println("Running:" + sql);
        ResultSet res = stmt.executeQuery(sql);
        try {
            System.out.println("執(zhí)行“regular hive query”運行結果:");
            while (res.next()) {
                System.out.println("count ------>" + res.getString(1));
            }
        } finally {
            res.close();
        }
    }

    /** Runs {@code select *} on the table and prints every (key, value) row. */
    private static void selectData(Statement stmt, String tableName)
            throws SQLException {
        String sql = "select * from " + tableName;
        System.out.println("Running:" + sql);
        ResultSet res = stmt.executeQuery(sql);
        try {
            System.out.println("執(zhí)行 select * query 運行結果:");
            while (res.next()) {
                System.out.println(res.getInt(1) + "\t" + res.getString(2));
            }
        } finally {
            res.close();
        }
    }

    /**
     * Loads the local test data file into the table.
     * Uses {@link Statement#execute}: LOAD DATA produces no result set, so
     * executeQuery would throw "The query did not generate a result set".
     */
    private static void loadData(Statement stmt, String tableName)
            throws SQLException {
        String filepath = "/home/hadoop01/data";
        String sql = "load data local inpath '" + filepath + "' into table "
                + tableName;
        System.out.println("Running:" + sql);
        stmt.execute(sql);
    }

    /** Prints the table's column names and types via {@code describe}. */
    private static void describeTables(Statement stmt, String tableName)
            throws SQLException {
        String sql = "describe " + tableName;
        System.out.println("Running:" + sql);
        ResultSet res = stmt.executeQuery(sql);
        try {
            System.out.println("執(zhí)行 describe table 運行結果:");
            while (res.next()) {
                System.out.println(res.getString(1) + "\t" + res.getString(2));
            }
        } finally {
            res.close();
        }
    }

    /** Confirms the table exists by running {@code show tables 'name'}. */
    private static void showTables(Statement stmt, String tableName)
            throws SQLException {
        String sql = "show tables '" + tableName + "'";
        System.out.println("Running:" + sql);
        ResultSet res = stmt.executeQuery(sql);
        try {
            System.out.println("執(zhí)行 show tables 運行結果:");
            if (res.next()) {
                System.out.println(res.getString(1));
            }
        } finally {
            res.close();
        }
    }

    /**
     * Creates the demo table: (key int, value string), tab-delimited —
     * matching the '\t'-separated test data file.
     * DDL produces no result set, so execute (not executeQuery) is used.
     */
    private static void createTable(Statement stmt, String tableName)
            throws SQLException {
        String sql = "create table "
                + tableName
                + " (key int, value string) row format delimited fields terminated by '\t'";
        stmt.execute(sql);
    }

    /**
     * Drops the demo table if it exists and returns its name.
     * "if exists" makes the first run (table absent) succeed as well.
     */
    private static String dropTable(Statement stmt) throws SQLException {
        String tableName = "testHive";
        String sql = "drop table if exists " + tableName;
        stmt.execute(sql);
        return tableName;
    }

    /** Loads the Hive driver and opens a JDBC connection. */
    private static Connection getConn() throws ClassNotFoundException,
            SQLException {
        Class.forName(driverName);
        return DriverManager.getConnection(url, user, password);
    }
}
4、測試結果
5、終端查詢結果:
hive> select * from testHive;
OK
1 zhangsan
2 lisi
3 wangwu
Time taken: 11.232 seconds
hive>