当前位置: 首页 > 工具软件 > VelocyPack > 使用案例 >

spark写arango_com.arangodb.velocypack.exception.VPackParserException: java.lang.InstantiationExceptio...

司徒正信
2023-12-01

2017-01-11 16:10:02

0

i am trying to connect my ArangoDb database with scala, but when the connection is made, and i am doing the operation, i have an VPackParserException error. My code:

import com.arangodb.spark.{ArangoSpark, ReadOptions}

import org.apache.spark.SparkContext

import org.apache.spark.SparkConf

import scala.beans.BeanProperty

object ArangotoSpark {

def main(args: Array[String]) {

case class netflow(@BeanProperty SrcHost: String,

@BeanProperty DstHost: String,

@BeanProperty SrcPort: String,

@BeanProperty DstPort: String,

@BeanProperty Protocol: String,

@BeanProperty StartTS: String,

@BeanProperty EndTS: String,

@BeanProperty Packets: Int,

@BeanProperty Bytes: Int) { }

val conf = new SparkConf().setAppName("Simple Application").setMaster("local[*]")

.set("arangodb.host", "127.0.0.2")

.set("arangodb.port", "8529")

.set("arangodb.user", "root")

.set("arangodb.password", "rut")

.set("arangodb.database", "netflow")

val sc = new SparkContext(conf)

val rdd = ArangoSpark.load[netflow](sc, "N201701031130", ReadOptions("netflow"))

val rdd2 = rdd.filter { x => x.SrcHost.matches("15.33.165.30") }

rdd2.count()

}

}

Any help is appreciated. Thank you.

UPDATE: Now my code looks like this:

case class nf (@BeanProperty cadena: String){

def this() = this(cadena = null)}

val rdd = ArangoSpark.load[nf](sc, "N201701031130", ReadOptions("netflow"))

println(rdd.count())

println("*************************************")

rdd.collect.foreach(println(_))

rdd.count gives the correct number of documents, but when i try to print them, i only have nf(null) lines

 类似资料: