线程 "main" 中抛出异常:scala.ScalaReflectionException



这是我连接两个数据帧的代码

package org.test.rddjoins
import org.apache.spark.SparkConf
import org.apache.spark.SparkConf
import org.apache.spark._
import org.apache.spark.rdd.RDD

object rdd {
  // Schema-bearing case classes: Spark SQL (1.x) derives table schemas from
  // these via Scala reflection (ScalaReflection.schemaFor in the stack trace).
  case class Score(name: String, score: Int)
  case class Age(name: String, age: Int)

  /**
   * Entry point: reads "name,value" CSV lines from scores.txt and ages.txt,
   * registers both datasets as SQL tables and prints their join on `name`.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf()
      .setAppName("rdd")
      .setMaster("local[2]")
    val sc = new SparkContext(sparkConf)
    try {
      val sqlContext = new org.apache.spark.sql.SQLContext(sc)
      // Brings the implicit RDD -> SchemaRDD conversion (createSchemaRDD) into
      // scope; it supplies registerAsTable on RDDs of case classes (Spark 1.0 API).
      import sqlContext._

      // Parse each line as "name,number"; assumes well-formed two-field rows
      // (a malformed row would throw on s(1)/toInt) — NOTE(review): confirm inputs.
      val scores = sc.textFile("scores.txt").map(_.split(",")).map(s => Score(s(0), s(1).trim.toInt))
      val ages = sc.textFile("ages.txt").map(_.split(",")).map(s => Age(s(0), s(1).trim.toInt))
      scores.registerAsTable("scores")
      ages.registerAsTable("ages")

      // Inner join the two tables on the shared name column.
      val joined = sqlContext.sql("""
SELECT a.name, a.age, s.score
FROM ages a JOIN scores s
ON a.name = s.name""")
      joined.collect().foreach(println)
    } finally {
      // Always release the local Spark context, even if the job fails.
      sc.stop()
    }
  }
}

运行时出现以下错误:

Exception in thread "main" scala.ScalaReflectionException: class org.apache.spark.sql.catalyst.ScalaReflection in JavaMirror with primordial classloader with boot classpath [C:\Users\Owner\Downloads\Compressed\eclipse\plugins\org.scala-lang.scala-library_2.11.8.v20160304-115712-1706a37eb8.jar;C:\Users\Owner\Downloads\Compressed\eclipse\plugins\org.scala-lang.scala-reflect_2.11.8.v20160304-115712-1706a37eb8.jar;C:\Program Files\Java\jdk1.8.0_77\jre\lib\resources.jar;C:\Program Files\Java\jdk1.8.0_77\jre\lib\rt.jar;C:\Program Files\Java\jdk1.8.0_77\jre\lib\sunrsasign.jar;C:\Program Files\Java\jdk1.8.0_77\jre\lib\jsse.jar;C:\Program Files\Java\jdk1.8.0_77\jre\lib\jce.jar;C:\Program Files\Java\jdk1.8.0_77\jre\lib\charsets.jar;C:\Program Files\Java\jdk1.8.0_77\jre\lib\jfr.jar;C:\Program Files\Java\jdk1.8.0_77\jre\classes] not found.
    at scala.reflect.internal.Mirrors$RootsBase.staticClass(Mirrors.scala:123)
    at scala.reflect.internal.Mirrors$RootsBase.staticClass(Mirrors.scala:22)
    at org.apache.spark.sql.catalyst.ScalaReflection$$typecreator1$1.apply(ScalaReflection.scala:115)
    at scala.reflect.api.TypeTags$WeakTypeTagImpl.tpe$lzycompute(TypeTags.scala:232)
    at scala.reflect.api.TypeTags$WeakTypeTagImpl.tpe(TypeTags.scala:232)
    at scala.reflect.api.TypeTags$class.typeOf(TypeTags.scala:341)
    at scala.reflect.api.Universe.typeOf(Universe.scala:61)
    at org.apache.spark.sql.catalyst.ScalaReflection$class.schemaFor(ScalaReflection.scala:115)
    at org.apache.spark.sql.catalyst.ScalaReflection$.schemaFor(ScalaReflection.scala:33)
    at org.apache.spark.sql.catalyst.ScalaReflection$class.schemaFor(ScalaReflection.scala:100)
    at org.apache.spark.sql.catalyst.ScalaReflection$.schemaFor(ScalaReflection.scala:33)
    at org.apache.spark.sql.catalyst.ScalaReflection$class.attributesFor(ScalaReflection.scala:94)
    at org.apache.spark.sql.catalyst.ScalaReflection$.attributesFor(ScalaReflection.scala:33)
    at org.apache.spark.sql.SQLContext.createSchemaRDD(SQLContext.scala:111)
    at org.test.rddjoins.rdd$.main(rdd.scala:27)

救命!!!

类路径中缺少Apache Spark库。

异常表示在 classpath 中找不到一个与 Spark 相关的类(org.apache.spark.sql.catalyst.ScalaReflection)。您应该修改 classpath,加入 Spark 的相关 jar 包,并确保其 Scala 版本与您使用的 Scala 库版本一致。

相关内容

  • 没有找到相关文章

最新更新