I wrote this small program for Hive JDBC. It ran fine at first, but now when I try to run it, it suddenly fails with an error.
import java.io.FileWriter;
import java.io.InputStream;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
public class SampleHiveProgram
{
    String lyear = "";
    String lquarter = "";
    String driverName = "org.apache.hadoop.hive.jdbc.HiveDriver";

    public static void main(String[] args)
    {
        SampleHiveProgram s = new SampleHiveProgram();
        s.startHiveThriftServer();
        s.quarterTable();
    }

    public void startHiveThriftServer()
    {
        try
        {
            String cmd = "/home/hadoop/sqoop-1.3.0-cdh3u1/bin/StartHiveThriftServer.sh"; // this is the command to execute in the Unix shell
            // create a process for the shell
            ProcessBuilder pb = new ProcessBuilder("bash", "-c", cmd);
            pb.redirectErrorStream(true); // use this to capture messages sent to stderr
            Process shell = pb.start();
            InputStream shellIn = shell.getInputStream(); // this captures the output from the command
            // wait for the shell to finish and get the return code
            // at this point you can process the output issued by the command
            // for instance, this reads the output and writes it to System.out:
            int c;
            while ((c = shellIn.read()) != -1)
            {
                System.out.write(c);
            }
            int shellExitStatus = shell.waitFor();
            // close the stream
            shellIn.close();
        }
        catch (Exception e)
        {
            e.printStackTrace();
            System.exit(1);
        }
    }

    public void quarterTable()
    {
        try
        {
            String start = "2010-01-01";
            String end = "2011-01-01";
            System.out.println("in quarter table...");
            // create connection with database
            Class.forName(driverName);
            Connection con = DriverManager.getConnection("jdbc:hive://localhost:10000/default", "", "");
            String sql = null;
            Statement stmt = con.createStatement();
            ResultSet res = null;
            sql = "drop table TmpQuarterTable";
            System.out.println("Dropping the Quarter Table...");
            res = stmt.executeQuery(sql);
            // Creating Quarter Table
            sql = "create table TmpQuarterTable (year string, quarter string, quarterstart string, quarterend string, quartername string)" +
                  " ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' LINES TERMINATED BY '\012' STORED AS TEXTFILE ";
            System.out.println("Creating the Quarter Table...");
            res = stmt.executeQuery(sql);
            // create the file
            FileWriter fw = new FileWriter("/home/hadoop/Quarter.txt");
            // convert string date to calendar date
            DateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
            Date sdate = (Date) formatter.parse(start);
            Date edate = (Date) formatter.parse(end);
            Calendar c1 = Calendar.getInstance();
            Calendar c2 = Calendar.getInstance();
            c1.setTime(sdate);
            c2.setTime(edate);
            int q = 0;
            String QuarterEndDate = null;
            int resultMonthCount = 0;
            int resultYear = 0;
            int resultMonth = 0;
            Calendar c3 = Calendar.getInstance();
            c3.setTime(c1.getTime());
            while (c3.compareTo(c2) <= 0)
            {
                if (c3.get(Calendar.MONTH) >= 0 && c3.get(Calendar.MONTH) <= 2)
                {
                    q = 1;
                    QuarterEndDate = Integer.toString(c3.get(Calendar.YEAR)) + "-04-01";
                }
                else if (c3.get(Calendar.MONTH) >= 3 && c3.get(Calendar.MONTH) <= 5)
                {
                    q = 2;
                    QuarterEndDate = Integer.toString(c3.get(Calendar.YEAR)) + "-07-01";
                }
                else if (c3.get(Calendar.MONTH) >= 6 && c3.get(Calendar.MONTH) <= 8)
                {
                    q = 3;
                    QuarterEndDate = Integer.toString(c3.get(Calendar.YEAR)) + "-10-01";
                }
                else if (c3.get(Calendar.MONTH) >= 9 && c3.get(Calendar.MONTH) <= 11)
                {
                    q = 4;
                    QuarterEndDate = Integer.toString(c3.get(Calendar.YEAR) + 1) + "-01-01";
                }
                // Got the QuarterEndDate (YYYY-MM-DD)
                // split the QuarterEndDate into qdate and create quarter_end_date
                String[] qdate = QuarterEndDate.split("-");
                Calendar quarter_end_date = Calendar.getInstance();
                quarter_end_date.set(Integer.parseInt(qdate[0]), Integer.parseInt(qdate[1]), Integer.parseInt(qdate[2]));
                System.out.println("quarter_end_date : " + quarter_end_date);
                // YY
                String YY = Integer.toString(c3.get(Calendar.YEAR));
                // quarter start date = quarter end date - 1
                Calendar quarter_start_date = Calendar.getInstance();
                quarter_start_date.set(quarter_end_date.get(Calendar.YEAR), quarter_end_date.get(Calendar.MONTH), quarter_end_date.get(Calendar.DATE));
                quarter_start_date.add(Calendar.YEAR, -1);
                // year
                String year = Integer.toString(quarter_start_date.get(Calendar.YEAR));
                System.out.println("year : " + year);
                // month
                String months = null;
                if (quarter_start_date.get(Calendar.MONTH) < 10)
                    months = "0" + Integer.toString(quarter_start_date.get(Calendar.MONTH));
                else
                    months = Integer.toString(quarter_start_date.get(Calendar.MONTH));
                System.out.println("month : " + months);
                // day
                String day = null;
                if (quarter_start_date.get(Calendar.DATE) < 10)
                    day = "0" + Integer.toString(quarter_start_date.get(Calendar.DATE));
                else
                    day = Integer.toString(quarter_start_date.get(Calendar.DATE));
                System.out.println("day : " + day);
                // adding 3 months
                resultMonthCount = c3.get(Calendar.MONTH) + 3;
                resultYear = c3.get(Calendar.YEAR);
                resultMonth = resultMonthCount - resultYear * 12;
                System.out.println("YEAR : " + resultYear);
                c3.set(Calendar.YEAR, resultYear);
                System.out.println("MONTH : " + resultMonthCount);
                c3.set(Calendar.MONTH, resultMonthCount);
                System.out.println("c3 : " + c3.getTime());
                // YY,q,startDate,endDate,quartername ------->> write to file
                String QuarterStartDate = year + "-" + months + "-" + day;
                String quarterName = YY + "\"Q\"" + q;
                fw.write(YY + "," + q + "," + QuarterStartDate + "," + QuarterEndDate + "," + quarterName + "\n");
            } // end of while
            fw.close();
            String filepath = "/home/hadoop/Quarter.txt";
            sql = "load data local inpath '" + filepath + "' overwrite into table TmpQuarterTable";
            System.out.println("Running: " + sql);
            stmt.executeUpdate(sql);
        }
        catch (Exception e)
        {
            e.printStackTrace();
            System.exit(1);
        }
    }
}
The error looks like this:
Exception in thread "main" java.lang.IncompatibleClassChangeError: class com.facebook.fb303.FacebookService$Client has interface org.apache.thrift.TServiceClient as super class
at java.lang.ClassLoader.defineClass1(Native Method)
at java.lang.ClassLoader.defineClassCond(ClassLoader.java:631)
at java.lang.ClassLoader.defineClass(ClassLoader.java:615)
at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:141)
at java.net.URLClassLoader.defineClass(URLClassLoader.java:283)
at java.net.URLClassLoader.access$000(URLClassLoader.java:58)
at java.net.URLClassLoader$1.run(URLClassLoader.java:197)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:190)
at java.lang.ClassLoader.loadClass(ClassLoader.java:306)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:301)
at java.lang.ClassLoader.loadClass(ClassLoader.java:247)
at java.lang.ClassLoader.defineClass1(Native Method)
at java.lang.ClassLoader.defineClassCond(ClassLoader.java:631)
at java.lang.ClassLoader.defineClass(ClassLoader.java:615)
at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:141)
at java.net.URLClassLoader.defineClass(URLClassLoader.java:283)
at java.net.URLClassLoader.access$000(URLClassLoader.java:58)
at java.net.URLClassLoader$1.run(URLClassLoader.java:197)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:190)
at java.lang.ClassLoader.loadClass(ClassLoader.java:306)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:301)
at java.lang.ClassLoader.loadClass(ClassLoader.java:247)
at java.lang.ClassLoader.defineClass1(Native Method)
at java.lang.ClassLoader.defineClassCond(ClassLoader.java:631)
at java.lang.ClassLoader.defineClass(ClassLoader.java:615)
at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:141)
at java.net.URLClassLoader.defineClass(URLClassLoader.java:283)
at java.net.URLClassLoader.access$000(URLClassLoader.java:58)
at java.net.URLClassLoader$1.run(URLClassLoader.java:197)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:190)
at java.lang.ClassLoader.loadClass(ClassLoader.java:306)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:301)
at java.lang.ClassLoader.loadClass(ClassLoader.java:247)
at java.lang.ClassLoader.defineClass1(Native Method)
at java.lang.ClassLoader.defineClassCond(ClassLoader.java:631)
at java.lang.ClassLoader.defineClass(ClassLoader.java:615)
at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:141)
at java.net.URLClassLoader.defineClass(URLClassLoader.java:283)
at java.net.URLClassLoader.access$000(URLClassLoader.java:58)
at java.net.URLClassLoader$1.run(URLClassLoader.java:197)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:190)
at java.lang.ClassLoader.loadClass(ClassLoader.java:306)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:301)
at java.lang.ClassLoader.loadClass(ClassLoader.java:247)
at org.apache.hadoop.hive.jdbc.HiveConnection.<init>(HiveConnection.java:93)
at org.apache.hadoop.hive.jdbc.HiveDriver.connect(HiveDriver.java:104)
at java.sql.DriverManager.getConnection(DriverManager.java:582)
at java.sql.DriverManager.getConnection(DriverManager.java:185)
at SampleHiveProgram.quarterTable(SampleHiveProgram.java:64)
at SampleHiveProgram.main(SampleHiveProgram.java:22)
What am I doing wrong here? I have already added all the JARs that Hive needs.
I hit the same error when trying to run a sample JDBC program against Hive. I then added the location of libfb*.jar to the classpath:
[cloudera@quickstart ownjava]$ echo $CLASSPATH
/etc/hadoop/conf:/usr/lib/hadoop/lib/*:/usr/lib/hadoop/.//*:/usr/lib/hadoop-hdfs/./:/usr/lib/hadoop-hdfs/lib/*:/usr/lib/hadoop-hdfs/.//*:/usr/lib/hadoop-yarn/lib/*:/usr/lib/hadoop-yarn/.//*:/usr/lib/hadoop-mapreduce/lib/*:/usr/lib/hadoop-mapreduce/.//*:/usr/lib/hive/lib/:/usr/lib/hive/lib/hive-exec.jar:/usr/lib/hive/lib/hive-exec-0.12.0-cdh5.1.0.jar:/usr/lib/hive/lib/hive-service.jar:/usr/lib/hive/lib/hive-service-0.12.0-cdh5.1.0.jar:/usr/lib/hive/lib/hive-metastore.jar:/usr/lib/hive/lib/hive-metastore-0.12.0-cdh5.1.0.jar:/home/cloudera/ownjava/:/usr/lib/hive/lib/hive-jdbc-0.12.0-cdh5.1.0.jar:/usr/lib/hive/lib/hive-jdbc.jar:/usr/lib/hive/lib/libfb303-0.9.0.jar:/usr/lib/hive/lib/jdo-api-3.0.1.jar:/usr/lib/hive/lib/antlr-runtime-3.4.jar
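For what it's worth, you can also double-check what the JVM itself sees, rather than what the shell variable contains. The following is just a throwaway diagnostic sketch (the class name ClasspathCheck is made up for this example, and the jar-name fragments it looks for are taken from the CDH paths shown above; adjust them for your install):

// Hypothetical diagnostic class: print the classpath entries the JVM was actually
// started with and keep only the Thrift/Hive-related ones, to confirm that the
// libfb303 and libthrift jars you expect are really being picked up.
public class ClasspathCheck
{
    public static void main(String[] args)
    {
        String cp = System.getProperty("java.class.path");
        for (String entry : cp.split(java.io.File.pathSeparator))
        {
            if (entry.contains("libfb303") || entry.contains("libthrift") || entry.contains("hive-"))
            {
                System.out.println(entry);
            }
        }
    }
}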
Do you happen to have more than one version of Thrift (libthrift) lying around? It sounds like the classes were generated with one version of Thrift but a different version of libthrift is being loaded at runtime.
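One way to check is to look the two classes from the stack trace up as resources on the classpath, without actually loading them, and see which jar(s) each one comes from. This is only a sketch (the class name WhichJar is made up here), but if TServiceClient turns up in more than one jar, or in a Thrift jar of a different release than the one libfb303 was generated against, that mismatch would explain the IncompatibleClassChangeError:

// Hypothetical diagnostic class: locate the .class files as classpath resources
// instead of defining the classes, so this works even for the class that fails
// to load above. Multiple or mismatched jar URLs indicate the version clash.
public class WhichJar
{
    public static void main(String[] args) throws java.io.IOException
    {
        String[] resources = {
            "org/apache/thrift/TServiceClient.class",
            "com/facebook/fb303/FacebookService$Client.class"
        };
        for (String r : resources)
        {
            java.util.Enumeration<java.net.URL> urls = ClassLoader.getSystemResources(r);
            while (urls.hasMoreElements())
            {
                System.out.println(r + " -> " + urls.nextElement());
            }
        }
    }
}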