
A Spark project written in Java fails at runtime with "Error: Missing application resource". The code is as follows:

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;

public class SimpleApp {
    public static void main(String[] args) {
        // Run locally with a single thread; the app name shows up in the Spark UI
        SparkConf conf = new SparkConf().setAppName("wordCount").setMaster("local");
        JavaSparkContext sc = new JavaSparkContext(conf);

        // Load the test file as an RDD of lines
        JavaRDD<String> input = sc.textFile("/bigdata/softwares/spark-2.1.0-bin-hadoop2.7/testdata/a.txt");

        // Count lines containing "yes"
        long bCount = input.filter(new Function<String, Boolean>() {
            public Boolean call(String s) { return s.contains("yes"); }
        }).count();

        // Count lines containing "ywq"
        long cCount = input.filter(new Function<String, Boolean>() {
            public Boolean call(String s) { return s.contains("ywq"); }
        }).count();

        // Print both counts plus the total number of lines
        System.out.println("yes:" + bCount + " ywq:" + cCount + " all:" + input.count());

        // sc.stop();
    }
}
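
For reference, Spark 2.x's Java API also accepts Java 8 lambdas, so the two anonymous Function classes above can be written more compactly. A minimal sketch of the same filters, using the same input RDD and search strings as above:

long bCount = input.filter(s -> s.contains("yes")).count();   // lines containing "yes"
long cCount = input.filter(s -> s.contains("ywq")).count();   // lines containing "ywq"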

The pom.xml is as follows:

<dependencies>
  <dependency> <!-- Spark dependency -->
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-core_2.11</artifactId>
    <version>2.1.0</version>
  </dependency>
</dependencies>
<build>
  <plugins>
    <plugin>
      <groupId>org.apache.maven.plugins</groupId>
      <artifactId>maven-shade-plugin</artifactId>
      <version>2.3</version>
      <executions>
        <execution>
          <phase>package</phase>
          <goals>
            <goal>shade</goal>
          </goals>
        </execution>
      </executions>
    </plugin>
  </plugins>
</build>
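
Since the shade plugin is bound to the package phase above, a normal Maven build already produces the runnable fat jar. A minimal sketch, assuming the jar name spark-0.0.1-SNAPSHOT.jar used in the answers below comes from this project's artifactId and version:

mvn clean package
# the shaded jar is written to target/, e.g. target/spark-0.0.1-SNAPSHOT.jar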

Maven packages everything into the jar, but when I run it the error below appears. I have only just started learning Spark, so if anyone knows what is wrong, please explain. Thanks. (Screenshot of the spark-submit output showing "Error: Missing application resource".)

Answers


You also have to tell spark-submit which main class to run:

spark-submit --class <your.package>.SimpleApp testjar/spark-0.0.1-SNAPSHOT.jar 

You will need to specify the main class along with the master:

./bin/spark-submit --class package.name.MainClass --master local[2] /testjar/spark-0.0.1-SNAPSHOT.jar
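
Combining the two answers for this project: SimpleApp has no package declaration, so the bare class name is passed to --class, and spark-submit prints "Error: Missing application resource" when it is not given an application jar at all, so the path to the packaged jar must appear on the command line. A sketch, assuming the shaded jar was built into target/ as described above:

./bin/spark-submit \
  --class SimpleApp \
  --master local[2] \
  target/spark-0.0.1-SNAPSHOT.jar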