首页 > 代码库 > java 操作hive通过jdbc
Java 通过 JDBC 操作 Hive
直接上代码吧。注意:需要先开启 Hive 的 JDBC 服务:hive --service hiveserver
package hive;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

/**
 * Demo of driving Hive through its JDBC driver: (re)creates a table,
 * loads a file from HDFS into it, then runs a few queries.
 *
 * <p>Requires the HiveServer1 service to be running:
 * {@code hive --service hiveserver}. (For HiveServer2 the driver class is
 * "org.apache.hive.jdbc.HiveDriver" and the URL scheme "jdbc:hive2://".)
 */
public class HiveDemo {

    static {
        // Register the HiveServer1 JDBC driver.
        try {
            Class.forName("org.apache.hadoop.hive.jdbc.HiveDriver");
        } catch (ClassNotFoundException e) {
            // Fail fast: nothing below can work without the driver. The
            // original printed the stack trace and carried on to a guaranteed
            // SQLException at getConnection().
            throw new ExceptionInInitializerError(e);
        }
    }

    public static void main(String[] args) throws Exception {
        String tableName = "u1_data";

        // try-with-resources closes connection/statement/result sets even when
        // a statement throws (the original leaked all three on any failure).
        try (Connection conn = DriverManager.getConnection(
                     "jdbc:hive://hadoop:10000/default", "", "");
             Statement st = conn.createStatement()) {

            // DDL/DML must go through execute()/executeUpdate();
            // executeQuery() is only for statements that return a ResultSet.
            // "if exists" keeps the first run from failing.
            st.execute("drop table if exists " + tableName);

            // Create the table. Note the ASCII single quotes: the original
            // used typographic quotes (‘\t‘), which Hive rejects.
            st.execute("create table " + tableName + "("
                    + "userid int,"
                    + "movieid int,"
                    + "rating int,"
                    + "city string,"
                    + "viewTime string"
                    + ")"
                    + "row format delimited "
                    + "fields terminated by '\t' "
                    + "stored as textfile");

            // List all tables ("while", not "if": there may be more than one).
            String sql = "show tables";
            System.out.println("running:" + sql);
            try (ResultSet rs = st.executeQuery(sql)) {
                while (rs.next()) {
                    System.out.println(rs.getString(1));
                }
            }

            // Describe the table: column name and type.
            sql = "describe " + tableName;
            System.out.println("running:" + sql);
            try (ResultSet rs = st.executeQuery(sql)) {
                while (rs.next()) {
                    System.out.println(rs.getString(1) + "\t" + rs.getString(2));
                }
            }

            // Load data from HDFS into the table (DML -> execute()).
            String filePath = "hdfs://hadoop:9000/input";
            sql = "load data inpath '" + filePath + "' overwrite into table " + tableName;
            System.out.println("running:" + sql);
            st.execute(sql);

            // Sample a few rows (columns 3 and 4: rating and city).
            sql = "select * from " + tableName + " limit 5";
            System.out.println("running:" + sql);
            try (ResultSet rs = st.executeQuery(sql)) {
                while (rs.next()) {
                    System.out.println(rs.getString(3) + "\t" + rs.getString(4));
                }
            }

            // Total row count.
            sql = "select count(*) from " + tableName;
            System.out.println("running:" + sql);
            try (ResultSet rs = st.executeQuery(sql)) {
                while (rs.next()) {
                    System.out.println(rs.getString(1));
                }
            }
        }
    }
}
声明:以上内容来自用户投稿及互联网公开渠道收集整理发布,本网站不拥有所有权,未作人工编辑处理,也不承担相关法律责任,若内容有误或涉及侵权可进行投诉: 投诉/举报 工作人员会在5个工作日内联系你,一经查实,本站将立刻删除涉嫌侵权内容。