spark-submit job submission fails with java.lang.ClassNotFoundException: Demo02



Case: submitting a Spark SQL program to a standalone Spark deployment for execution.


package demo01

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.sql.SQLContext

object Demo02 {

  def main(args: Array[String]): Unit = {

    // Build the configuration and set the application name
    val conf = new SparkConf()
    conf.setAppName("Demo02")
    // Note: hard-coding the master forces local mode even when submitted to a cluster
    conf.setMaster("local")
    // conf.set("spark.shuffle.manager", "hash")

    // Create the SparkContext from the configuration
    val sc = new SparkContext(conf)

    // Create the SQLContext on top of it
    val sqlContext = new SQLContext(sc)
    import sqlContext.implicits._

    val rdd = sc.makeRDD(List((1, "zhang"), (2, "li"), (3, "wang")))
    val df = rdd.toDF("id", "name")

    // df.registerTempTable("tabx")   // deprecated since Spark 2.0
    df.createTempView("tabx")

    val df2 = sqlContext.sql("select * from tabx order by name")
    val rdd2 = df2.toJavaRDD.repartition(1)
    rdd2.saveAsTextFile("file:///home/hadoop/df.txt")
    // rdd2.saveAsTextFile("d:/df")

    sc.stop()
  }
}
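
Note the package demo01 declaration at the top: the compiled class's JVM name is demo01.Demo02, not plain Demo02. A quick way to confirm this (a hypothetical check, not part of the original program) is to print the runtime class name; Scala compiles an object to a class whose name ends in $:

package demo01

// Hypothetical helper object: prints the fully qualified runtime name of Demo02.
object PrintName {
  def main(args: Array[String]): Unit = {
    // Prints "demo01.Demo02$" -- the package prefix is part of the name the JVM
    // uses, and it is what --class must match (minus the trailing $).
    println(Demo02.getClass.getName)
  }
}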


Package it into a jar -> sql.jar
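
The original post does not say how the jar was built. If you use sbt, a minimal build.sbt along these lines would work; the Spark and Scala versions below are assumptions, so match them to your cluster:

name := "sql"
version := "1.0"
scalaVersion := "2.11.8"

// Spark is supplied by the cluster at runtime, so it is marked "provided"
// and is not bundled into the jar.
libraryDependencies ++= Seq(
  "org.apache.spark" %% "spark-core" % "2.1.0" % "provided",
  "org.apache.spark" %% "spark-sql"  % "2.1.0" % "provided"
)

With this setup, sbt package would produce the jar, and the compiled class sits inside it at demo01/Demo02.class, mirroring the package declaration.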

Submit it to the cluster to run:  ./spark-submit --class Demo02 ./sql.jar

java.lang.ClassNotFoundException: Demo02
        at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
        at java.lang.Class.forName0(Native Method)
        at java.lang.Class.forName(Class.java:348)
        at org.apache.spark.util.Utils$.classForName(Utils.scala:230)
        at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:732)
        at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
        at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
        at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
        at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)

Solution: pass the fully qualified class name:  ./spark-submit --class demo01.Demo02 ./sql.jar
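
Why this fixes it: because the source file declares package demo01, the class can only be loaded under its fully qualified name demo01.Demo02. As the stack trace shows (Utils.scala:230), spark-submit resolves the main class via Class.forName, which performs exactly this lookup. A minimal sketch of the same resolution (hypothetical object name; run it with sql.jar on the classpath):

// Hypothetical standalone check reproducing the lookup spark-submit performs.
object CheckClassName {
  def main(args: Array[String]): Unit = {
    // The simple name fails, just as ./spark-submit --class Demo02 did:
    try Class.forName("Demo02")
    catch { case e: ClassNotFoundException => println(s"not found: ${e.getMessage}") }

    // The fully qualified name resolves, like --class demo01.Demo02:
    val cls = Class.forName("demo01.Demo02")
    println(s"loaded: ${cls.getName}")
  }
}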



Copyright notice: this article is original work by tryll and is licensed under CC 4.0 BY-SA. Please include a link to the original source and this notice when republishing.