My Scala application, built with Scala 2.11.12, throws the following error while executing a certain piece of code.
The environment configuration is as follows:
Scala IDE for Eclipse: 4.7
Eclipse Version: 2019-06 (4.12.0)
Spark Version: 2.4.4
Java Version: "1.8.0_221"
However, the same configuration works fine in the Eclipse IDE with Scala version 2.11.11.
Exception in thread "main" java.lang.NumberFormatException: Not a version: 9
at scala.util.PropertiesTrait$class.parts$1(Properties.scala:184)
at scala.util.PropertiesTrait$class.isJavaAtLeast(Properties.scala:187)
at scala.util.Properties$.isJavaAtLeast(Properties.scala:17)
at scala.tools.util.PathResolverBase$Calculated$.javaBootClasspath(PathResolver.scala:276)
at scala.tools.util.PathResolverBase$Calculated$.basis(PathResolver.scala:283)
at scala.tools.util.PathResolverBase$Calculated$.containers$lzycompute(PathResolver.scala:293)
at scala.tools.util.PathResolverBase$Calculated$.containers(PathResolver.scala:293)
at scala.tools.util.PathResolverBase.containers(PathResolver.scala:309)
at scala.tools.util.PathResolver.computeResult(PathResolver.scala:341)
at scala.tools.util.PathResolver.computeResult(PathResolver.scala:332)
at scala.tools.util.PathResolverBase.result(PathResolver.scala:314)
at scala.tools.nsc.backend.JavaPlatform$class.classPath(JavaPlatform.scala:28)
at scala.tools.nsc.Global$GlobalPlatform.classPath(Global.scala:115)
at scala.tools.nsc.Global.scala$tools$nsc$Global$$recursiveClassPath(Global.scala:131)
at scala.tools.nsc.Global.classPath(Global.scala:128)
at scala.tools.nsc.backend.jvm.BTypesFromSymbols.<init>(BTypesFromSymbols.scala:39)
at scala.tools.nsc.backend.jvm.BCodeIdiomatic.<init>(BCodeIdiomatic.scala:24)
at scala.tools.nsc.backend.jvm.BCodeHelpers.<init>(BCodeHelpers.scala:23)
at scala.tools.nsc.backend.jvm.BCodeSkelBuilder.<init>(BCodeSkelBuilder.scala:25)
at scala.tools.nsc.backend.jvm.BCodeBodyBuilder.<init>(BCodeBodyBuilder.scala:25)
at scala.tools.nsc.backend.jvm.BCodeSyncAndTry.<init>(BCodeSyncAndTry.scala:21)
at scala.tools.nsc.backend.jvm.GenBCode.<init>(GenBCode.scala:47)
at scala.tools.nsc.Global$genBCode$.<init>(Global.scala:675)
at scala.tools.nsc.Global.genBCode$lzycompute(Global.scala:671)
at scala.tools.nsc.Global.genBCode(Global.scala:671)
at scala.tools.nsc.backend.jvm.GenASM$JPlainBuilder.serialVUID(GenASM.scala:1240)
at scala.tools.nsc.backend.jvm.GenASM$JPlainBuilder.genClass(GenASM.scala:1329)
at scala.tools.nsc.backend.jvm.GenASM$AsmPhase.emitFor$1(GenASM.scala:198)
at scala.tools.nsc.backend.jvm.GenASM$AsmPhase.run(GenASM.scala:204)
at scala.tools.nsc.Global$Run.compileUnitsInternal(Global.scala:1528)
at scala.tools.nsc.Global$Run.compileUnits(Global.scala:1513)
at scala.tools.reflect.ToolBoxFactory$ToolBoxImpl$ToolBoxGlobal.wrapInPackageAndCompile(ToolBoxFactory.scala:197)
at scala.tools.reflect.ToolBoxFactory$ToolBoxImpl$ToolBoxGlobal.compile(ToolBoxFactory.scala:252)
at scala.tools.reflect.ToolBoxFactory$ToolBoxImpl$$anonfun$compile$2.apply(ToolBoxFactory.scala:429)
at scala.tools.reflect.ToolBoxFactory$ToolBoxImpl$$anonfun$compile$2.apply(ToolBoxFactory.scala:422)
at scala.tools.reflect.ToolBoxFactory$ToolBoxImpl$withCompilerApi$.liftedTree2$1(ToolBoxFactory.scala:355)
at scala.tools.reflect.ToolBoxFactory$ToolBoxImpl$withCompilerApi$.apply(ToolBoxFactory.scala:355)
at scala.tools.reflect.ToolBoxFactory$ToolBoxImpl.compile(ToolBoxFactory.scala:422)
at com.slb.itdataplatform.dq.DataQualityValidation$$anonfun$compile$1.apply(DataQualityValidation.scala:112)
at scala.util.Try$.apply(Try.scala:192)
at com.slb.itdataplatform.dq.DataQualityValidation$.compile(DataQualityValidation.scala:109)
at com.slb.itdataplatform.dq.DataQualityValidation$.generateVerifier(DataQualityValidation.scala:104)
at com.slb.itdataplatform.dq.DataQualityValidation$.main(DataQualityValidation.scala:49)
at com.slb.itdataplatform.dq.DataQualityValidation.main(DataQualityValidation.scala)
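For context, the error occurs while compiling generated code at runtime through a reflection ToolBox (see the scala.tools.reflect frames above). A minimal sketch of the kind of call my code makes around DataQualityValidation.scala:112 (simplified, with illustrative names; the real logic is omitted):

    import scala.reflect.runtime.currentMirror
    import scala.tools.reflect.ToolBox

    object ToolBoxRepro {
      def main(args: Array[String]): Unit = {
        // mkToolBox wraps an embedded Scala compiler instance
        val toolBox = currentMirror.mkToolBox()
        val tree = toolBox.parse("""println("compiled at runtime")""")
        // compile() initializes the compiler backend; that path resolves the
        // JDK boot classpath via scala.util.Properties.isJavaAtLeast, which
        // is where the NumberFormatException above is thrown
        toolBox.compile(tree)() // fails with "Not a version: 9" in my setup
      }
    }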
I could stay on Scala 2.11.11 with the same environment configuration, but Spark 2.4.4's underlying Scala version is 2.11.12, so I want to use that same version in my application to avoid conflicts (with a mismatched Scala version my Spark apps fail to initialize with "Unable to initialize Spark job").
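For reference, this is how I pin the versions (a sketch assuming an sbt build; my actual build file differs in details):

    // build.sbt (sketch): match the application's Scala version to the
    // one Spark 2.4.4 was built against
    scalaVersion := "2.11.12"

    libraryDependencies ++= Seq(
      "org.apache.spark" %% "spark-core" % "2.4.4",
      "org.apache.spark" %% "spark-sql"  % "2.4.4",
      // the reflection ToolBox lives in scala-compiler, which should
      // match scalaVersion exactly
      "org.scala-lang" % "scala-compiler" % "2.11.12"
    )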
What could be the root cause of this error, and how can it be resolved?