When I run a Spark program in IntelliJ IDEA, I hit an out-of-memory error every time. I then tried adjusting IntelliJ IDEA's JVM options: under /Applications/IntelliJ IDEA 15.app/Contents/bin there is an idea.vmoptions file, which I modified as follows:
-Xms2g
-Xmx6g
-XX:MaxNewSize=256m
-XX:MaxPermSize=512m
-XX:ReservedCodeCacheSize=1024m
-XX:+UseCompressedOops
After restarting IntelliJ IDEA, running the project still failed with an OOM error.
Then I instead set the VM options under Run > Edit Configurations:
-XX:MaxNewSize=256m -XX:MaxPermSize=512m
Running the program again, the OOM error was gone.
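For reference, a fuller VM options line for that Run Configuration might look like the one below; the heap and PermGen sizes are illustrative guesses rather than values from this post, and the PermGen flags only apply on JDK 7 and earlier (JDK 8 replaced PermGen with Metaspace):
-Xmx2g -XX:PermSize=256m -XX:MaxPermSize=512m -XX:MaxNewSize=256m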
I am using JDK 7, and the Spark program is a very simple example:
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.hive.HiveContext

object SparkArchetype {
  def main(args: Array[String]): Unit = {
    // local mode is only for testing on this machine; remove setMaster when running on a cluster
    val conf = new SparkConf().setAppName("TestSparkHQL").setMaster("local")
    val sc = new SparkContext(conf)
    val sqlContext = new HiveContext(sc)
    val arr = sc.parallelize((1 to 8).toList).filter(_ % 2 == 0).take(3)
    println(arr.mkString(", "))
    sc.stop()
  }
}
The exception log is as follows:
17/08/03 16:47:08 INFO ObjectStore: Setting MetaStore object pin classes with hive.metastore.cache.pinobjtypes="Table,StorageDescriptor,SerDeInfo,Partition,Database,Type,FieldSchema,Order"
Exception in thread "main" java.lang.reflect.InvocationTargetException
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
at org.apache.spark.sql.hive.client.IsolatedClientLoader.liftedTree1$1(IsolatedClientLoader.scala:183)
at org.apache.spark.sql.hive.client.IsolatedClientLoader.<init>(IsolatedClientLoader.scala:179)
at org.apache.spark.sql.hive.HiveContext.metadataHive$lzycompute(HiveContext.scala:226)
at org.apache.spark.sql.hive.HiveContext.metadataHive(HiveContext.scala:185)
at org.apache.spark.sql.hive.HiveContext.setConf(HiveContext.scala:392)
at org.apache.spark.sql.hive.HiveContext.defaultOverrides(HiveContext.scala:174)
at org.apache.spark.sql.hive.HiveContext.<init>(HiveContext.scala:177)
at com.meituan.mthdp.sparktools.sometools.SparkArchetype$.main(SparkArchetype.scala:12)
at com.meituan.mthdp.sparktools.sometools.SparkArchetype.main(SparkArchetype.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at com.intellij.rt.execution.application.AppMain.main(AppMain.java:144)
Caused by: java.lang.OutOfMemoryError: PermGen space
at java.lang.ClassLoader.defineClass1(Native Method)
at java.lang.ClassLoader.defineClass(ClassLoader.java:800)
at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:142)
at java.net.URLClassLoader.defineClass(URLClassLoader.java:449)
at java.net.URLClassLoader.access$100(URLClassLoader.java:71)
at java.net.URLClassLoader$1.run(URLClassLoader.java:361)
at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
at org.apache.spark.sql.hive.client.IsolatedClientLoader$$anon$1.doLoadClass(IsolatedClientLoader.scala:165)
at org.apache.spark.sql.hive.client.IsolatedClientLoader$$anon$1.loadClass(IsolatedClientLoader.scala:153)
at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
at org.apache.hadoop.hive.ql.metadata.HiveException.<init>(HiveException.java:31)
at org.apache.hadoop.hive.ql.metadata.Hive.getAllDatabases(Hive.java:1236)
at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:174)
at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:166)
at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:503)
at org.apache.spark.sql.hive.client.ClientWrapper.<init>(ClientWrapper.scala:171)
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
at org.apache.spark.sql.hive.client.IsolatedClientLoader.liftedTree1$1(IsolatedClientLoader.scala:183)
at org.apache.spark.sql.hive.client.IsolatedClientLoader.<init>(IsolatedClientLoader.scala:179)
at org.apache.spark.sql.hive.HiveContext.metadataHive$lzycompute(HiveContext.scala:226)
at org.apache.spark.sql.hive.HiveContext.metadataHive(HiveContext.scala:185)
at org.apache.spark.sql.hive.HiveContext.setConf(HiveContext.scala:392)
at org.apache.spark.sql.hive.HiveContext.defaultOverrides(HiveContext.scala:174)
at org.apache.spark.sql.hive.HiveContext.<init>(HiveContext.scala:177)
at com.meituan.mthdp.sparktools.sometools.SparkArchetype$.main(SparkArchetype.scala:12)
at com.meituan.mthdp.sparktools.sometools.SparkArchetype.main(SparkArchetype.scala)
Exception in thread "Thread-2" java.lang.OutOfMemoryError: PermGen space
at java.lang.ClassLoader.defineClass1(Native Method)
at java.lang.ClassLoader.defineClass(ClassLoader.java:800)
at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:142)
at java.net.URLClassLoader.defineClass(URLClassLoader.java:449)
at java.net.URLClassLoader.access$100(URLClassLoader.java:71)
at java.net.URLClassLoader$1.run(URLClassLoader.java:361)
at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
at org.apache.log4j.spi.LoggingEvent.<init>(LoggingEvent.java:165)
at org.apache.log4j.Category.forcedLog(Category.java:391)
at org.apache.log4j.Category.log(Category.java:856)
at org.slf4j.impl.Log4jLoggerAdapter.log(Log4jLoggerAdapter.java:601)
at org.apache.commons.logging.impl.SLF4JLocationAwareLog.warn(SLF4JLocationAwareLog.java:199)
at org.apache.hadoop.util.ShutdownHookManager$1.run(ShutdownHookManager.java:56)
Does anyone know what is going on here?
Try changing the JVM options of the program you are running; what you modified are the options for launching the IDE itself. idea.vmoptions only configures IntelliJ IDEA's own JVM, while your Spark program runs in a separate JVM that the IDE spawns, so its PermGen size has to be set in the Run Configuration's VM options, as you eventually did.
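For completeness, roughly equivalent settings when submitting the same job to a cluster instead of running it from the IDE might look like the sketch below; the sizes, jar name, and the rest of the spark-submit command line are assumptions for illustration only:
# illustrative spark-submit invocation (sizes and jar name are assumptions, adjust to your setup)
spark-submit \
  --class com.meituan.mthdp.sparktools.sometools.SparkArchetype \
  --driver-java-options "-XX:MaxPermSize=512m" \
  --conf "spark.executor.extraJavaOptions=-XX:MaxPermSize=512m" \
  your-app.jar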