ClassNotFoundException in spark-submit

I have the following code, which I have packaged into a jar:

import org.apache.spark.sql.SparkSession

// Schema matching the Hive student table
case class Student(StudentID: Long,
                   creationdate: java.sql.Timestamp,
                   mark1: String,
                   mark2: String)

object Main {
  def main(args: Array[String]): Unit = {
    // Metastore properties must be in place before the session is created
    System.setProperty("hive.metastore.uris", "thrift://internal-shared-hive-metastore-elb-550897717.us-west-2.elb.amazonaws.com:9083")
    System.setProperty("hive.metastore.execute.setugi", "false")

    val spark = SparkSession
      .builder()
      .appName("EMRCustomerApplication")
      .master("local")
      .enableHiveSupport()
      .getOrCreate()

    // Reuse the session's context instead of constructing a second
    // SparkContext, which is what the spark.driver.allowMultipleContexts
    // workaround was needed for
    val sc = spark.sparkContext

    import spark.implicits._
    val allRecordsDF = spark.sql("""select * from student""")
  }
}

I get the exception below. My understanding is that the --class option should name the main class of the Spark jar. This is the command I ran:

spark-submit --class "Main" s3://Users/test/test_2.10-1.0.jar 

That is what I did. Can anyone see what the problem is?

java.lang.ClassNotFoundException: Main
    at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
    at java.lang.Class.forName0(Native Method)
    at java.lang.Class.forName(Class.java:348)
    at org.apache.spark.util.Utils$.classForName(Utils.scala:229)
    at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:695)
    at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:187)
    at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:212)
    at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:126)
    at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Command exiting with ret '101'
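
As a first check, it helps to confirm that the compiled class actually made it into the jar. Assuming a local copy of the jar named as in the command above, something like:

jar tf test_2.10-1.0.jar | grep Main

should list Main.class. If nothing is listed, the jar was built without the class; if the entry carries a directory prefix, the class lives in a package and --class must use the fully qualified name.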

Put the class in a package, then run it using the package-qualified class name.
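
For illustration, the package declaration goes at the top of the source file (com.example here is a hypothetical name; use whatever package your project actually has):

package com.example

object Main {
  def main(args: Array[String]): Unit = {
    // same session setup and query as in the code above
  }
}

After rebuilding the jar, pass the fully qualified class name to spark-submit:

spark-submit --class "com.example.Main" s3://Users/test/test_2.10-1.0.jar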
