2016-09-19 112 views
2

我在scala中有一個簡單的Spark程序,但有下面的代碼,但得到異常。所有即時消息嘗試執行的是運行主代碼。 我還包括gradle配置。 任何幫助將不勝感激。java.lang.ClassNotFoundException Spark Scala

錯誤: -

Exception in thread "main" java.lang.NoClassDefFoundError: com/fasterxml/jackson/module/scala/DefaultScalaModule$ 
    at org.apache.spark.SparkContext.withScope(SparkContext.scala:714) 
    at org.apache.spark.SparkContext.parallelize(SparkContext.scala:728)... 
Caused by: java.lang.ClassNotFoundException: com.fasterxml.jackson.module.scala.DefaultScalaModule$ 
    at java.net.URLClassLoader.findClass(URLClassLoader.java:381) 
    at java.lang.ClassLoader.loadClass(ClassLoader.java:424) 
    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:331) 
    at java.lang.ClassLoader.loadClass(ClassLoader.java:357) 

主營: -

def main(args: Array[String]) { 

    val conf = new SparkConf() 
     .setAppName("TempratureRDD") 
     .setMaster("local[2]") 
     .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer") 
    val sc = new SparkContext(conf) 
    print("___________________________________________________________________________________________") 

    val vertexArray = Array(
     (1L, ("Sensor1", 28)), 
     (2L, ("Sensor2", 27)), 
     (3L, ("Sensor3", 65)), 
     (4L, ("Sensor4", 42)), 
     (5L, ("Sensor5", 55)), 
     (6L, ("Sensor6", 50)) 
    ) 
    val edgeArray = Array(
     Edge(2L, 1L, 7), 
     Edge(2L, 4L, 2), 
     Edge(3L, 2L, 4), 
     Edge(3L, 6L, 3), 
     Edge(4L, 1L, 1), 
     Edge(5L, 2L, 2), 
     Edge(5L, 3L, 8), 
     Edge(5L, 6L, 3) 
    ) 




    val vertexRDD: RDD[(Long, (String, Int))] = sc.parallelize(vertexArray) 
    val edgeRDD: RDD[Edge[Int]] = sc.parallelize(edgeArray) 

    val graph: Graph[(String, Int), Int] = Graph(vertexRDD, edgeRDD) 

    for ((id,(name,age)) <- graph.vertices.filter { case (id,(name,age)) => age > 30 }.collect) { 
     println(s"$name is $age") 
    } 

    } 

的build.gradle: -

dependencies { 

compile fileTree(dir: 'lib', include: ['*.jar']) 
// The production code uses the SLF4J logging API at compile time 
compile 'org.slf4j:slf4j-api:1.7.12' 
compile 'org.scala-lang:scala-library:2.11.8' 
testCompile 'junit:junit:4.12' 
compile 'com.sparkjava:spark-core:2.5' 
// https://mvnrepository.com/artifact/org.apache.spark/spark-streaming_2.11 
compile group: 'org.apache.spark', name: 'spark-streaming_2.11', version: '1.6.0' 
// https://mvnrepository.com/artifact/org.apache.spark/spark-streaming-mqtt_2.10 
compile group: 'org.apache.spark', name: 'spark-streaming-mqtt_2.10', version: '1.6.2' 
// https://mvnrepository.com/artifact/org.eclipse.paho/org.eclipse.paho.client.mqttv3 
compile group: 'org.eclipse.paho', name: 'org.eclipse.paho.client.mqttv3', version: '1.1.0' 
// https://mvnrepository.com/artifact/com.google.code.gson/gson 
compile group: 'com.google.code.gson', name: 'gson', version: '2.7' 
// https://mvnrepository.com/artifact/org.apache.spark/spark-graphx_2.10 
compile group: 'org.apache.spark', name: 'spark-graphx_2.10', version: '2.0.0' 

}

沒有其他依賴

+2

所有你混合使用關聯階版本的第一次......你與'2.11.8'編譯Building Spark和 那麼你有'火花graphx_2.10 '和'spark-streaming-mqtt_2.10',它們是針對scala 2.10.x編譯的。首先解決這個問題...並且在後面擔心所有其他問題。 –

回答