How do I configure the YARN address for yarn-client mode in Spark?

From a remote Scala program, using Spark 1.3, how do I initialize the SparkContext so that it connects to Spark running on YARN? In other words, where do I set the address of the YARN node(s)?

Currently my program contains:

  val conf = new SparkConf().setMaster("yarn-client").setAppName("MyApp")
  val sc = new SparkContext(conf)

and it produces:

[error] (run-main-0) java.lang.ExceptionInInitializerError
java.lang.ExceptionInInitializerError
    at org.apache.spark.util.Utils$.getSparkOrYarnConfig(Utils.scala:1959)
    at org.apache.spark.storage.BlockManager.<init>(BlockManager.scala:104)
    at org.apache.spark.storage.BlockManager.<init>(BlockManager.scala:179)
    at org.apache.spark.SparkEnv$.create(SparkEnv.scala:310)
    at org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:163)
    at org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:269)
    at org.apache.spark.SparkContext.<init>(SparkContext.scala:272)
    at SparkExampleLocalDriver$.main(SparkExample.scala:9)
    at SparkExampleLocalDriver.main(SparkExample.scala)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:606)
Caused by: org.apache.spark.SparkException: Unable to load YARN support
    at org.apache.spark.deploy.SparkHadoopUtil$.liftedTree1$1(SparkHadoopUtil.scala:217)
    at org.apache.spark.deploy.SparkHadoopUtil$.<init>(SparkHadoopUtil.scala:212)
    at org.apache.spark.deploy.SparkHadoopUtil$.<clinit>(SparkHadoopUtil.scala)
    at org.apache.spark.util.Utils$.getSparkOrYarnConfig(Utils.scala:1959)
    at org.apache.spark.storage.BlockManager.<init>(BlockManager.scala:104)
    at org.apache.spark.storage.BlockManager.<init>(BlockManager.scala:179)
    at org.apache.spark.SparkEnv$.create(SparkEnv.scala:310)
    at org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:163)
    at org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:269)
    at org.apache.spark.SparkContext.<init>(SparkContext.scala:272)
    at SparkExampleLocalDriver$.main(SparkExample.scala:9)
    at SparkExampleLocalDriver.main(SparkExample.scala)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:606)
Caused by: java.lang.ClassNotFoundException: org.apache.spark.deploy.yarn.YarnSparkHadoopUtil
    at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
    at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
    at java.security.AccessController.doPrivileged(Native Method)
    at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
    at java.lang.Class.forName0(Native Method)
    at java.lang.Class.forName(Class.java:195)
    at org.apache.spark.deploy.SparkHadoopUtil$.liftedTree1$1(SparkHadoopUtil.scala:213)
    at org.apache.spark.deploy.SparkHadoopUtil$.<init>(SparkHadoopUtil.scala:212)
    at org.apache.spark.deploy.SparkHadoopUtil$.<clinit>(SparkHadoopUtil.scala)
    at org.apache.spark.util.Utils$.getSparkOrYarnConfig(Utils.scala:1959)
    at org.apache.spark.storage.BlockManager.<init>(BlockManager.scala:104)
    at org.apache.spark.storage.BlockManager.<init>(BlockManager.scala:179)
    at org.apache.spark.SparkEnv$.create(SparkEnv.scala:310)
    at org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:163)
    at org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:269)
    at org.apache.spark.SparkContext.<init>(SparkContext.scala:272)
    at SparkExampleLocalDriver$.main(SparkExample.scala:9)
    at SparkExampleLocalDriver.main(SparkExample.scala)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:606)
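
For what it's worth, here is a rough sketch of how I think a yarn-client driver is supposed to be wired up, assuming (a) the spark-yarn module is on the driver's classpath (its absence would explain the ClassNotFoundException for YarnSparkHadoopUtil above) and (b) the ResourceManager address is read from the yarn-site.xml found via YARN_CONF_DIR or HADOOP_CONF_DIR rather than set on the SparkConf:

    // Build dependency (sbt), assuming the spark-yarn artifact is published
    // for this Spark/Scala version:
    //   libraryDependencies += "org.apache.spark" %% "spark-yarn" % "1.3.0"
    //
    // Environment, assuming the cluster config files are available locally:
    //   export YARN_CONF_DIR=/path/to/dir/containing/yarn-site.xml
    import org.apache.spark.{SparkConf, SparkContext}

    object SparkExampleLocalDriver {
      def main(args: Array[String]): Unit = {
        val conf = new SparkConf()
          .setMaster("yarn-client")       // selects the deployment mode; no host/port goes here
          .setAppName("MyApp")
        val sc = new SparkContext(conf)   // RM address is picked up from yarn-site.xml
        // ... job code ...
        sc.stop()
      }
    }

As far as I can tell, setMaster only selects the deployment mode in 1.3; the actual ResourceManager host and port come from the Hadoop/YARN client configuration on the driver's classpath or in YARN_CONF_DIR, not from the SparkConf.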
  • Can you add your Spark version?
  • Good point, it's 1.3.
Author: mitchus | 2015-05-18