spark-submit --master local[4] --driver-class-path /home/rahul.midha/test/src/main/resources/conf/ --class com.test.main.TestMain test.jar [21.12.2016] [00:47:58] ERROR [main] Exception : scala.ScalaReflectionException: class com.test.do.TestDetails in JavaMirror with sun.misc.Launcher$AppClassLoader@3e2f1b1a of type class sun.misc.Launcher$AppClassLoader with classpath [file:/home/rahul.midha/test/src/main/resources/conf/,file:/usr/local/spark/conf/,file:/usr/local/spark/jars/joda-time-2.9.3.jar,file:/usr/local/spark/jars/jcl-over-slf4j-1.7.16.jar,file:/usr/local/spark/jars/slf4j-api-1.7.16.jar,file:/usr/local/spark/jars/log4j-1.2.17.jar,file:/usr/local/spark/jars/javax.inject-1.jar,file:/usr/local/spark/jars/metrics-graphite-3.1.2.jar,file:/usr/local/spark/jars/commons-beanutils-core-1.8.0.jar,file:/usr/local/spark/jars/javax.annotation-api-1.2.jar,file:/usr/local/spark/jars/slf4j-log4j12-1.7.16.jar,file:/usr/local/spark/jars/commons-io-2.4.jar,file:/usr/local/spark/jars/mail-1.4.7.jar,file:/usr/local/spark/jars/derby-10.12.1.1.jar,file:/usr/local/spark/jars/hadoop-common-2.6.4.jar,file:/usr/local/spark/jars/commons-beanutils-1.7.0.jar,file:/usr/local/spark/jars/parquet-common-1.7.0.jar,file:/usr/local/spark/jars/spire-macros_2.11-0.7.4.jar,file:/usr/local/spark/jars/xbean-asm5-shaded-4.4.jar,file:/usr/local/spark/jars/spark-sql_2.11-2.0.1.jar,file:/usr/local/spark/jars/commons-lang3-3.3.2.jar,file:/usr/local/spark/jars/api-asn1-api-1.0.0-M20.jar,file:/usr/local/spark/jars/ivy-2.4.0.jar,file:/usr/local/spark/jars/commons-lang-2.6.jar,file:/usr/local/spark/jars/pyrolite-4.9.jar,file:/usr/local/spark/jars/super-csv-2.2.0.jar,file:/usr/local/spark/jars/arpack_combined_all-0.1.jar,file:/usr/local/spark/jars/datanucleus-core-3.2.10.jar,file:/usr/local/spark/jars/parquet-hadoop-bundle-1.6.0.jar,file:/usr/local/spark/jars/hadoop-yarn-server-web-proxy-2.6.4.jar,file:/usr/local/spark/jars/pmml-model-1.2.15.jar,file:/usr/local/spark/jars/scala-xml_2.11-1.0.2.jar,file:/usr/l
ocal/spark/jars/curator-client-2.6.0.jar,file:/usr/local/spark/jars/spark-tags_2.11-2.0.1.jar,file:/usr/local/spark/jars/hive-exec-1.2.1.spark2.jar,file:/usr/local/spark/jars/avro-mapred-1.7.7-hadoop2.jar,file:/usr/local/spark/jars/avro-ipc-1.7.7.jar,file:/usr/local/spark/jars/activation-1.1.1.jar,file:/usr/local/spark/jars/hadoop-mapreduce-client-app-2.6.4.jar,file:/usr/local/spark/jars/api-util-1.0.0-M20.jar,file:/usr/local/spark/jars/jta-1.1.jar,file:/usr/local/spark/jars/jetty-util-6.1.26.jar,file:/usr/local/spark/jars/metrics-core-3.1.2.jar,file:/usr/local/spark/jars/hadoop-hdfs-2.6.4.jar,file:/usr/local/spark/jars/spark-repl_2.11-2.0.1.jar,file:/usr/local/spark/jars/javolution-5.5.1.jar,file:/usr/local/spark/jars/json-20090211.jar,file:/usr/local/spark/jars/datanucleus-api-jdo-3.2.6.jar,file:/usr/local/spark/jars/spire_2.11-0.7.4.jar,file:/usr/local/spark/jars/guice-servlet-3.0.jar,file:/usr/local/spark/jars/chill_2.11-0.8.0.jar,file:/usr/local/spark/jars/avro-1.7.7.jar,file:/usr/local/spark/jars/scala-parser-combinators_2.11-1.0.4.jar,file:/usr/local/spark/jars/jersey-server-2.22.2.jar,file:/usr/local/spark/jars/compress-lzf-1.0.3.jar,file:/usr/local/spark/jars/stringtemplate-3.2.1.jar,file:/usr/local/spark/jars/json4s-ast_2.11-3.2.11.jar,file:/usr/local/spark/jars/paranamer-2.3.jar,file:/usr/local/spark/jars/calcite-avatica-1.2.0-incubating.jar,file:/usr/local/spark/jars/osgi-resource-locator-1.0.1.jar,file:/usr/local/spark/jars/calcite-core-1.2.0-incubating.jar,file:/usr/local/spark/jars/hadoop-auth-2.6.4.jar,file:/usr/local/spark/jars/breeze_2.11-0.11.2.jar,file:/usr/local/spark/jars/spark-unsafe_2.11-2.0.1.jar,file:/usr/local/spark/jars/spark-streaming_2.11-2.0.1.jar,file:/usr/local/spark/jars/spark-core_2.11-2.0.1.jar,file:/usr/local/spark/jars/xercesImpl-2.9.1.jar,file:/usr/local/spark/jars/hadoop-mapreduce-client-core-2.6.4.jar,file:/usr/local/spark/jars/java-xmlbuilder-1.0.jar,file:/usr/local/spark/jars/objenesis-2.1.jar,file:/usr/local/spark/jars/hk2
-locator-2.4.0-b34.jar,file:/usr/local/spark/jars/hadoop-mapreduce-client-shuffle-2.6.4.jar,file:/usr/local/spark/jars/xmlenc-0.52.jar,file:/usr/local/spark/jars/spark-yarn_2.11-2.0.1.jar,file:/usr/local/spark/jars/parquet-encoding-1.7.0.jar,file:/usr/local/spark/jars/json4s-core_2.11-3.2.11.jar,file:/usr/local/spark/jars/aopalliance-repackaged-2.4.0-b34.jar,file:/usr/local/spark/jars/snappy-0.2.jar,file:/usr/local/spark/jars/antlr-2.7.7.jar,file:/usr/local/spark/jars/jackson-core-asl-1.9.13.jar,file:/usr/local/spark/jars/hive-cli-1.2.1.spark2.jar,file:/usr/local/spark/jars/hadoop-yarn-client-2.6.4.jar,file:/usr/local/spark/jars/jersey-container-servlet-core-2.22.2.jar,file:/usr/local/spark/jars/scala-library-2.11.8.jar,file:/usr/local/spark/jars/guice-3.0.jar,file:/usr/local/spark/jars/spark-sketch_2.11-2.0.1.jar,file:/usr/local/spark/jars/chill-java-0.8.0.jar,file:/usr/local/spark/jars/hadoop-yarn-server-common-2.6.4.jar,file:/usr/local/spark/jars/eigenbase-properties-1.1.5.jar,file:/usr/local/spark/jars/parquet-hadoop-1.7.0.jar,file:/usr/local/spark/jars/stax-api-1.0.1.jar,file:/usr/local/spark/jars/netty-3.8.0.Final.jar,file:/usr/local/spark/jars/jackson-module-scala_2.11-2.6.5.jar,file:/usr/local/spark/jars/mesos-0.21.1-shaded-protobuf.jar,file:/usr/local/spark/jars/httpclient-4.5.2.jar,file:/usr/local/spark/jars/hadoop-yarn-api-2.6.4.jar,file:/usr/local/spark/jars/curator-framework-2.6.0.jar,file:/usr/local/spark/jars/commons-net-2.2.jar,file:/usr/local/spark/jars/parquet-jackson-1.7.0.jar,file:/usr/local/spark/jars/htrace-core-3.0.4.jar,file:/usr/local/spark/jars/jpam-1.1.jar,file:/usr/local/spark/jars/jersey-media-jaxb-2.22.2.jar,file:/usr/local/spark/jars/spark-network-common_2.11-2.0.1.jar,file:/usr/local/spark/jars/minlog-1.3.0.jar,file:/usr/local/spark/jars/jdo-api-3.0.1.jar,file:/usr/local/spark/jars/snappy-java-1.1.2.6.jar,file:/usr/local/spark/jars/spark-mllib_2.11-2.0.1.jar,file:/usr/local/spark/jars/javax.servlet-api-3.1.0.jar,file:/usr/local/spark/
jars/scala-reflect-2.11.8.jar,file:/usr/local/spark/jars/jackson-jaxrs-1.9.13.jar,file:/usr/local/spark/jars/hadoop-yarn-common-2.6.4.jar,file:/usr/local/spark/jars/jets3t-0.9.3.jar,file:/usr/local/spark/jars/RoaringBitmap-0.5.11.jar,file:/usr/local/spark/jars/jodd-core-3.5.2.jar,file:/usr/local/spark/jars/apacheds-i18n-2.0.0-M15.jar,file:/usr/local/spark/jars/metrics-jvm-3.1.2.jar,file:/usr/local/spark/jars/ST4-4.0.4.jar,file:/usr/local/spark/jars/hive-metastore-1.2.1.spark2.jar,file:/usr/local/spark/jars/xz-1.0.jar,file:/usr/local/spark/jars/scalap-2.11.8.jar,file:/usr/local/spark/jars/guava-14.0.1.jar,file:/usr/local/spark/jars/commons-logging-1.1.3.jar,file:/usr/local/spark/jars/jersey-client-2.22.2.jar,file:/usr/local/spark/jars/jackson-annotations-2.6.5.jar,file:/usr/local/spark/jars/spark-launcher_2.11-2.0.1.jar,file:/usr/local/spark/jars/antlr4-runtime-4.5.3.jar,file:/usr/local/spark/jars/datanucleus-rdbms-3.2.9.jar,file:/usr/local/spark/jars/hive-jdbc-1.2.1.spark2.jar,file:/usr/local/spark/jars/py4j-0.10.3.jar,file:/usr/local/spark/jars/spark-graphx_2.11-2.0.1.jar,file:/usr/local/spark/jars/spark-hive_2.11-2.0.1.jar,file:/usr/local/spark/jars/hk2-utils-2.4.0-b34.jar,file:/usr/local/spark/jars/curator-recipes-2.6.0.jar,file:/usr/local/spark/jars/core-1.1.2.jar,file:/usr/local/spark/jars/parquet-generator-1.7.0.jar,file:/usr/local/spark/jars/stream-2.7.0.jar,file:/usr/local/spark/jars/mx4j-3.0.2.jar,file:/usr/local/spark/jars/netty-all-4.0.29.Final.jar,file:/usr/local/spark/jars/validation-api-1.1.0.Final.jar,file:/usr/local/spark/jars/janino-2.7.8.jar,file:/usr/local/spark/jars/jersey-container-servlet-2.22.2.jar,file:/usr/local/spark/jars/univocity-parsers-2.1.1.jar,file:/usr/local/spark/jars/gson-2.2.4.jar,file:/usr/local/spark/jars/scala-compiler-2.11.8.jar,file:/usr/local/spark/jars/jackson-databind-2.6.5.jar,file:/usr/local/spark/jars/jackson-module-paranamer-2.6.5.jar,file:/usr/local/spark/jars/calcite-linq4j-1.2.0-incubating.jar,file:/usr/local/spark/
jars/json4s-jackson_2.11-3.2.11.jar,file:/usr/local/spark/jars/hk2-api-2.4.0-b34.jar,file:/usr/local/spark/jars/jsr305-1.3.9.jar,file:/usr/local/spark/jars/jackson-mapper-asl-1.9.13.jar,file:/usr/local/spark/jars/commons-codec-1.10.jar,file:/usr/local/spark/jars/spark-network-shuffle_2.11-2.0.1.jar,file:/usr/local/spark/jars/commons-collections-3.2.2.jar,file:/usr/local/spark/jars/kryo-shaded-3.0.3.jar,file:/usr/local/spark/jars/spark-hive-thriftserver_2.11-2.0.1.jar,file:/usr/local/spark/jars/libfb303-0.9.2.jar,file:/usr/local/spark/jars/hadoop-client-2.6.4.jar,file:/usr/local/spark/jars/antlr-runtime-3.4.jar,file:/usr/local/spark/jars/httpcore-4.4.4.jar,file:/usr/local/spark/jars/parquet-column-1.7.0.jar,file:/usr/local/spark/jars/commons-httpclient-3.1.jar,file:/usr/local/spark/jars/hadoop-mapreduce-client-common-2.6.4.jar,file:/usr/local/spark/jars/bonecp-0.8.0.RELEASE.jar,file:/usr/local/spark/jars/hive-beeline-1.2.1.spark2.jar,file:/usr/local/spark/jars/zookeeper-3.4.6.jar,file:/usr/local/spark/jars/jetty-6.1.26.jar,file:/usr/local/spark/jars/pmml-schema-1.2.15.jar,file:/usr/local/spark/jars/breeze-macros_2.11-0.11.2.jar,file:/usr/local/spark/jars/jersey-guava-2.22.2.jar,file:/usr/local/spark/jars/commons-configuration-1.6.jar,file:/usr/local/spark/jars/javax.ws.rs-api-2.0.1.jar,file:/usr/local/spark/jars/leveldbjni-all-1.8.jar,file:/usr/local/spark/jars/jackson-xc-1.9.13.jar,file:/usr/local/spark/jars/parquet-format-2.3.0-incubating.jar,file:/usr/local/spark/jars/commons-compress-1.4.1.jar,file:/usr/local/spark/jars/hadoop-annotations-2.6.4.jar,file:/usr/local/spark/jars/commons-dbcp-1.4.jar,file:/usr/local/spark/jars/stax-api-1.0-2.jar,file:/usr/local/spark/jars/lz4-1.3.0.jar,file:/usr/local/spark/jars/JavaEWAH-0.3.2.jar,file:/usr/local/spark/jars/commons-cli-1.2.jar,file:/usr/local/spark/jars/opencsv-2.3.jar,file:/usr/local/spark/jars/aopalliance-1.0.jar,file:/usr/local/spark/jars/javassist-3.18.1-GA.jar,file:/usr/local/spark/jars/apache-log4j-extras-1.2.17
.jar,file:/usr/local/spark/jars/javax.inject-2.4.0-b34.jar,file:/usr/local/spark/jars/apacheds-kerberos-codec-2.0.0-M15.jar,file:/usr/local/spark/jars/oro-2.0.8.jar,file:/usr/local/spark/jars/jline-2.12.1.jar,file:/usr/local/spark/jars/commons-compiler-2.7.6.jar,file:/usr/local/spark/jars/jersey-common-2.22.2.jar,file:/usr/local/spark/jars/metrics-json-3.1.2.jar,file:/usr/local/spark/jars/commons-pool-1.5.4.jar,file:/usr/local/spark/jars/jtransforms-2.4.0.jar,file:/usr/local/spark/jars/jaxb-api-2.2.2.jar,file:/usr/local/spark/jars/protobuf-java-2.5.0.jar,file:/usr/local/spark/jars/base64-2.3.8.jar,file:/usr/local/spark/jars/libthrift-0.9.2.jar,file:/usr/local/spark/jars/hadoop-mapreduce-client-jobclient-2.6.4.jar,file:/usr/local/spark/jars/jackson-core-2.6.5.jar,file:/usr/local/spark/jars/commons-math3-3.4.1.jar,file:/usr/local/spark/jars/bcprov-jdk15on-1.51.jar,file:/usr/local/spark/jars/spark-mllib-local_2.11-2.0.1.jar,file:/usr/local/spark/jars/spark-catalyst_2.11-2.0.1.jar,file:/usr/local/spark/jars/commons-digester-1.8.jar,file:/usr/local/spark/jars/jul-to-slf4j-1.7.16.jar,file:/etc/hadoop/conf.pseudo/] and parent being sun.misc.Launcher$ExtClassLoader@69c67db of type class sun.misc.Launcher$ExtClassLoader with classpath [file:/usr/lib/jvm/java-7-openjdk-amd64/jre/lib/ext/dnsns.jar,file:/usr/lib/jvm/java-7-openjdk-amd64/jre/lib/ext/localedata.jar,file:/usr/lib/jvm/java-7-openjdk-amd64/jre/lib/ext/sunjce_provider.jar,file:/usr/lib/jvm/java-7-openjdk-amd64/jre/lib/ext/java-atk-wrapper.jar,file:/usr/lib/jvm/java-7-openjdk-amd64/jre/lib/ext/libatk-wrapper.so,file:/usr/lib/jvm/java-7-openjdk-amd64/jre/lib/ext/sunpkcs11.jar,file:/usr/lib/jvm/java-7-openjdk-amd64/jre/lib/ext/zipfs.jar,file:/usr/lib/jvm/java-7-openjdk-amd64/jre/lib/ext/icedtea-sound.jar] and parent being primordial classloader with boot classpath 
[/usr/lib/jvm/java-7-openjdk-amd64/jre/lib/resources.jar:/usr/lib/jvm/java-7-openjdk-amd64/jre/lib/rt.jar:/usr/lib/jvm/java-7-openjdk-amd64/jre/lib/sunrsasign.jar:/usr/lib/jvm/java-7-openjdk-amd64/jre/lib/jsse.jar:/usr/lib/jvm/java-7-openjdk-amd64/jre/lib/jce.jar:/usr/lib/jvm/java-7-openjdk-amd64/jre/lib/charsets.jar:/usr/lib/jvm/java-7-openjdk-amd64/jre/lib/rhino.jar:/usr/lib/jvm/java-7-openjdk-amd64/jre/lib/jfr.jar:/usr/lib/jvm/java-7-openjdk-amd64/jre/classes] not found. at scala.reflect.internal.Mirrors$RootsBase.staticClass(Mirrors.scala:123) at scala.reflect.internal.Mirrors$RootsBase.staticClass(Mirrors.scala:22) at com.test.main.TestMain$$typecreator7$1.apply(TestMain.scala:50) at scala.reflect.api.TypeTags$WeakTypeTagImpl.tpe$lzycompute(TypeTags.scala:232) at scala.reflect.api.TypeTags$WeakTypeTagImpl.tpe(TypeTags.scala:232) at org.apache.spark.sql.SQLImplicits$$typecreator9$1.apply(SQLImplicits.scala:125) at scala.reflect.api.TypeTags$WeakTypeTagImpl.tpe$lzycompute(TypeTags.scala:232) at scala.reflect.api.TypeTags$WeakTypeTagImpl.tpe(TypeTags.scala:232) at org.apache.spark.sql.catalyst.encoders.ExpressionEncoder$.apply(ExpressionEncoder.scala:49) at org.apache.spark.sql.SQLImplicits.newProductSeqEncoder(SQLImplicits.scala:125) at com.test.main.TestMain$.main(TestMain.scala:50) at com.test.main.TestMain.main(TestMain.scala) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:606) at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:736) at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:185) at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:210) at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:124) at 
org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
找到了一个解决方案:添加 --driver-class-path 参数,并将其与 assembly(程序集)jar 一起提供
例如:
spark-submit --master local --driver-class-path file:///home/test/test.jar --class com.test.TestMain test.jar
但是,这只适用于 yarn client 模式和 Spark 本地(local)模式。对于 yarn cluster 模式,还没有找到一个优雅的解决方案。
我正在使用 Spark(Java)读取和写入数据库,使用的是 Spark 内置的集群管理器。应用程序打包为一个胖 JAR(fat jar),并通过 spark-submit 命令运行。任务运行正常,应用程序日志中没有异常,在命令行上执行 spark-submit 时也没有异常。以下是运行 spark-submit 的完整标准输出(注:下面的命令在原文中被机器翻译损坏,推测原文为类似如下的完整命令): ./spark-submit --class com.tte.site.dept.daily.Main --driver-... --master spark://ip-xxx-
好吧,我是 Java 新手,正试图从命令提示符运行一个 Java 程序(因为 Sublime 可以编译和运行它,但不支持用户输入之类的功能)。 我将命令提示符切换到存放我的文件(名为 Learner.java)的文件夹。 我在环境变量中为 JDK 的 bin 目录设置了路径,并创建了一个值为 JDK bin 路径的 JAVA_HOME 变量(这是其他一些帖子建议的)。 在讨论这个问题之前,先看我的代码: 如你所见,这是一个以“
问题内容: 到目前为止,我主要使用 Eclipse。现在,我正在尝试从终端运行 Java,但包(package)存在问题。 这是我的文件: 我用 javac 编译此代码,然后用 java 运行,它给我报错: 当我删除 package 声明后一切正常。我遗漏了什么? 给出: 问题答案: 您需要在包的上一级目录运行 java 命令,并提供完全限定的类名(包名.类名),例如: 请参阅“Java Launcher 如何查找用户类”以了解其工作方式。
我遇到了从命令提示符运行 Java 程序的问题。我有一个名为 DataRecover 的 Java 文件,还有一个名为 Triple 的 Java 文件。当我在命令提示符下运行 javac Triple.java 时,它能正常执行。但是,当我运行 javac DataRecover.java 时,它会出现这样的错误消息:Exception in thread "main" java.lang.NoClassDefFoundError: Dat
问题内容: 我已经阅读了以前发布的相关问题。有些含糊不清,没有一个能解决我的问题,所以我不得不再问一次。 我有两个简单的类, 另一个类是 我在 Windows cmd 中的基本文件夹 "basic" 中。我用 javac 编译, 它会创建一个文件夹和子文件夹。 然后运行时产生大量错误。许多答案说是指定的完整路径无效。我的类在 One 包中,因此使用 -cp 指定 One 也不起作用。 问题答案: 您应这样运行: …但要从 根 目录()运行, 而不是
我试着从命令行访问 mvn。 我试图按照手册所说,通过 mvn --version 验证 Maven 是否配置正确,但结果是: 这会是什么原因?我是否每次需要这些环境变量时都要重新导出它们?