import
--options-file ./dss_conn_parms.txt
--table BD.BD_TABLE
--target-dir /user/user_1/DMS
--m 1
--hive-import
--hive-table BD.BD_TABLE_HIVE
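For reference, the same arguments run from a shell outside Oozie would look roughly like the following (a sketch; the JDBC connection details are assumed to live in dss_conn_parms.txt):

sqoop import \
    --options-file ./dss_conn_parms.txt \
    --table BD.BD_TABLE \
    --target-dir /user/user_1/DMS \
    --m 1 \
    --hive-import \
    --hive-table BD.BD_TABLE_HIVE

When the job is run through Oozie, the launcher log reports: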
73167 [main] INFO org.apache.sqoop.hive.HiveImport - Loading uploaded data into Hive
2016-10-17 09:42:55,203 INFO [main] hive.HiveImport (HiveImport.java:importTable(195)) - Loading uploaded data into Hive
73180 [main] ERROR org.apache.sqoop.tool.ImportTool - Encountered IOException running import job: java.io.IOException: Cannot run program "hive": error=13, Permission denied
at java.lang.ProcessBuilder.start(ProcessBuilder.java:1048)
at java.lang.Runtime.exec(Runtime.java:620)
at java.lang.Runtime.exec(Runtime.java:528)
at org.apache.sqoop.util.Executor.exec(Executor.java:76)
at org.apache.sqoop.hive.HiveImport.executeExternalHiveScript(HiveImport.java:391)
at org.apache.sqoop.hive.HiveImport.executeScript(HiveImport.java:344)
at org.apache.sqoop.hive.HiveImport.importTable(HiveImport.java:245)
at org.apache.sqoop.tool.ImportTool.importTable(ImportTool.java:514)
at org.apache.sqoop.tool.ImportTool.run(ImportTool.java:605)
at org.apache.sqoop.Sqoop.run(Sqoop.java:148)
at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:76)
at org.apache.sqoop.Sqoop.runSqoop(Sqoop.java:184)
at org.apache.sqoop.Sqoop.runTool(Sqoop.java:226)
at org.apache.sqoop.Sqoop.runTool(Sqoop.java:235)
at org.apache.sqoop.Sqoop.main(Sqoop.java:244)
at org.apache.oozie.action.hadoop.SqoopMain.runSqoopJob(SqoopMain.java:197)
at org.apache.oozie.action.hadoop.SqoopMain.run(SqoopMain.java:177)
at org.apache.oozie.action.hadoop.LauncherMain.run(LauncherMain.java:47)
at org.apache.oozie.action.hadoop.SqoopMain.main(SqoopMain.java:46)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.oozie.action.hadoop.LauncherMapper.map(LauncherMapper.java:241)
at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:54)
at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:453)
at org.apache.hadoop.mapred.MapTask.run(MapTask.java:343)
at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:168)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1709)
at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:162)
Caused by: java.io.IOException: error=13, Permission denied
at java.lang.UNIXProcess.forkAndExec(Native Method)
at java.lang.UNIXProcess.<init>(UNIXProcess.java:248)
at java.lang.ProcessImpl.start(ProcessImpl.java:134)
at java.lang.ProcessBuilder.start(ProcessBuilder.java:1029)
... 31 more
Intercepting System.exit(1)
<<< Invocation of Main class completed <<<
Failing Oozie Launcher, Main class [org.apache.oozie.action.hadoop.SqoopMain], exit code [1]
I executed the Sqoop job with the hive-import option from the command line, and I know where the problem is. On the command line I can see the following:
Logging initialized using configuration in jar:file:/usr/hdp/2.4.2.0-258/hive/lib/hive-common-1.2.1000.2.4.2.0-jar!/hive-log4j.properties
OK
The problem is access to hive-common-1.2.1000.2.4.2.0-jar, which lives on the local file system. Do you know what I should do?
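Since error=13 is the OS-level "Permission denied" raised by forkAndExec when launching the hive client, one quick sanity check is whether the user the launcher container runs as can execute the hive binary and read its lib directory on that node. A sketch of such a check, using the HDP 2.4.2 path from the log line above (run it on the node where the launcher executed, as the job's user):

which hive
ls -l /usr/hdp/2.4.2.0-258/hive/bin/hive
ls -ld /usr/hdp/2.4.2.0-258/hive/lib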
1) Try adding
<env-var>HADOOP_USER_NAME=YOUR_USERNAME</env-var>
to the job definition in your workflow.xml (see the sketch after this list).
2) Add the following to core-site.xml:
<property>
    <name>hadoop.proxyuser.oozie.hosts</name>
    <value>*</value>
</property>
<property>
    <name>hadoop.proxyuser.oozie.groups</name>
    <value>*</value>
</property>
<property>
    <name>hadoop.proxyuser.YOUR_USERNAME.hosts</name>
    <value>*</value>
</property>
<property>
    <name>hadoop.proxyuser.YOUR_USERNAME.groups</name>
    <value>*</value>
</property>
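For illustration, here is a minimal sketch of where the <env-var> from suggestion 1) could go. Note that <env-var> belongs to the Oozie shell action schema, so this assumes the Sqoop command is wrapped in a shell action rather than the sqoop action seen in the stack trace; the action name and the ${jobTracker}/${nameNode} properties are placeholders, not taken from the original workflow:

<action name="sqoop-hive-import">
    <shell xmlns="uri:oozie:shell-action:0.2">
        <job-tracker>${jobTracker}</job-tracker>
        <name-node>${nameNode}</name-node>
        <exec>sqoop</exec>
        <argument>import</argument>
        <argument>--options-file</argument>
        <argument>dss_conn_parms.txt</argument>
        <argument>--table</argument>
        <argument>BD.BD_TABLE</argument>
        <argument>--target-dir</argument>
        <argument>/user/user_1/DMS</argument>
        <argument>--m</argument>
        <argument>1</argument>
        <argument>--hive-import</argument>
        <argument>--hive-table</argument>
        <argument>BD.BD_TABLE_HIVE</argument>
        <!-- environment variable from suggestion 1) -->
        <env-var>HADOOP_USER_NAME=YOUR_USERNAME</env-var>
        <!-- ship the options file with the action -->
        <file>dss_conn_parms.txt#dss_conn_parms.txt</file>
    </shell>
    <ok to="end"/>
    <error to="fail"/>
</action>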