-
Notifications
You must be signed in to change notification settings - Fork 210
Closed
Description
The following CI run failed (see job log):
https://github.com/apache/auron/actions/runs/21383141822/job/61554030266?pr=1958
Failing test: - log function with negative input *** FAILED ***
org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 119.0 failed 1 times, most recent failure: Lost task 0.0 in stage 119.0 (TID 288) (runnervmymu0l.lcg2avo1p2berav5sjxbnkio3f.bx.internal.cloudapp.net executor driver): org.apache.spark.executor.RemoteClassLoaderError: org.apache.spark.sql.catalyst.expressions.Object
at org.apache.spark.executor.ExecutorClassLoader.findClass(ExecutorClassLoader.scala:117)
at java.base/java.lang.ClassLoader.loadClass(ClassLoader.java:593)
at java.base/java.lang.ClassLoader.loadClass(ClassLoader.java:580)
at org.apache.spark.util.ParentClassLoader.loadClass(ParentClassLoader.java:40)
at java.base/java.lang.ClassLoader.loadClass(ClassLoader.java:526)
at org.codehaus.janino.ClassLoaderIClassLoader.findIClass(ClassLoaderIClassLoader.java:75)
at org.codehaus.janino.IClassLoader.loadIClass(IClassLoader.java:317)
at org.codehaus.janino.UnitCompiler.findTypeByName(UnitCompiler.java:8895)
at org.codehaus.janino.UnitCompiler.getRawReferenceType(UnitCompiler.java:7065)
at org.codehaus.janino.UnitCompiler.getReferenceType(UnitCompiler.java:6974)
at org.codehaus.janino.UnitCompiler.getReferenceType(UnitCompiler.java:6855)
at org.codehaus.janino.UnitCompiler.getType2(UnitCompiler.java:6826)
at org.codehaus.janino.UnitCompiler.access$14800(UnitCompiler.java:236)
at org.codehaus.janino.UnitCompiler$24.visitReferenceType(UnitCompiler.java:6724)
at org.codehaus.janino.UnitCompiler$24.visitReferenceType(UnitCompiler.java:6721)
at org.codehaus.janino.Java$ReferenceType.accept(Java.java:4289)
at org.codehaus.janino.UnitCompiler.getType(UnitCompiler.java:6721)
at org.codehaus.janino.UnitCompiler.getRawType(UnitCompiler.java:6717)
at org.codehaus.janino.UnitCompiler.getType2(UnitCompiler.java:7189)
at org.codehaus.janino.UnitCompiler.access$14600(UnitCompiler.java:236)
at org.codehaus.janino.UnitCompiler$24.visitArrayType(UnitCompiler.java:6722)
at org.codehaus.janino.UnitCompiler$24.visitArrayType(UnitCompiler.java:6721)
at org.codehaus.janino.Java$ArrayType.accept(Java.java:4382)
at org.codehaus.janino.UnitCompiler.getType(UnitCompiler.java:6721)
at org.codehaus.janino.UnitCompiler.getRawType(UnitCompiler.java:6717)
at org.codehaus.janino.UnitCompiler.access$1300(UnitCompiler.java:236)
at org.codehaus.janino.UnitCompiler$41.getParameterTypes2(UnitCompiler.java:10775)
at org.codehaus.janino.IClass$IInvocable.getParameterTypes(IClass.java:903)
at org.codehaus.janino.IClass$IMethod.getDescriptor2(IClass.java:1170)
at org.codehaus.janino.IClass$IInvocable.getDescriptor(IClass.java:926)
at org.codehaus.janino.IClass.getIMethods(IClass.java:270)
at org.codehaus.janino.IClass.getIMethods(IClass.java:259)
at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:502)
at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:442)
at org.codehaus.janino.UnitCompiler.access$400(UnitCompiler.java:236)
at org.codehaus.janino.UnitCompiler$3.visitPackageMemberClassDeclaration(UnitCompiler.java:422)
at org.codehaus.janino.UnitCompiler$3.visitPackageMemberClassDeclaration(UnitCompiler.java:418)
at org.codehaus.janino.Java$PackageMemberClassDeclaration.accept(Java.java:1688)
at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:418)
at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:392)
at org.codehaus.janino.UnitCompiler.access$000(UnitCompiler.java:236)
at org.codehaus.janino.UnitCompiler$2.visitCompilationUnit(UnitCompiler.java:363)
at org.codehaus.janino.UnitCompiler$2.visitCompilationUnit(UnitCompiler.java:361)
at org.codehaus.janino.Java$CompilationUnit.accept(Java.java:371)
at org.codehaus.janino.UnitCompiler.compileUnit(UnitCompiler.java:361)
at org.codehaus.janino.SimpleCompiler.cook(SimpleCompiler.java:264)
at org.codehaus.janino.ClassBodyEvaluator.cook(ClassBodyEvaluator.java:294)
at org.codehaus.janino.ClassBodyEvaluator.cook(ClassBodyEvaluator.java:288)
at org.codehaus.janino.ClassBodyEvaluator.cook(ClassBodyEvaluator.java:267)
at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:82)
at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$.doCompile(CodeGenerator.scala:1549)
at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$.$anonfun$cache$1(CodeGenerator.scala:1638)
at org.apache.spark.util.NonFateSharingCache$$anon$1.load(NonFateSharingCache.scala:68)
at org.sparkproject.guava.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3551)
at org.sparkproject.guava.cache.LocalCache$Segment.loadSync(LocalCache.java:2302)
at org.sparkproject.guava.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2177)
at org.sparkproject.guava.cache.LocalCache$Segment.get(LocalCache.java:2068)
at org.sparkproject.guava.cache.LocalCache.get(LocalCache.java:3986)
at org.sparkproject.guava.cache.LocalCache.getOrLoad(LocalCache.java:4007)
at org.sparkproject.guava.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4946)
at org.apache.spark.util.NonFateSharingLoadingCache.$anonfun$get$2(NonFateSharingCache.scala:108)
at org.apache.spark.util.KeyLock.withLock(KeyLock.scala:64)
at org.apache.spark.util.NonFateSharingLoadingCache.get(NonFateSharingCache.scala:108)
at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$.compile(CodeGenerator.scala:1490)
at org.apache.spark.sql.catalyst.expressions.codegen.GenerateSafeProjection$.create(GenerateSafeProjection.scala:205)
at org.apache.spark.sql.catalyst.expressions.codegen.GenerateSafeProjection$.create(GenerateSafeProjection.scala:39)
at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.generate(CodeGenerator.scala:1415)
at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.generate(CodeGenerator.scala:1412)
at org.apache.spark.sql.execution.DeserializeToObjectExec.$anonfun$doExecute$1(objects.scala:95)
at org.apache.spark.sql.execution.DeserializeToObjectExec.$anonfun$doExecute$1$adapted(objects.scala:94)
at org.apache.spark.rdd.RDD.$anonfun$mapPartitionsWithIndexInternal$2(RDD.scala:888)
at org.apache.spark.rdd.RDD.$anonfun$mapPartitionsWithIndexInternal$2$adapted(RDD.scala:888)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:374)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:338)
at org.apache.spark.sql.execution.SQLExecutionRDD.$anonfun$compute$1(SQLExecutionRDD.scala:52)
at org.apache.spark.sql.internal.SQLConf$.withExistingConf(SQLConf.scala:162)
at org.apache.spark.sql.execution.SQLExecutionRDD.compute(SQLExecutionRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:374)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:338)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:374)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:338)
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:93)
at org.apache.spark.TaskContext.runTaskWithListeners(TaskContext.scala:180)
at org.apache.spark.scheduler.Task.run(Task.scala:147)
at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$5(Executor.scala:716)
at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally(SparkErrorUtils.scala:86)
at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally$(SparkErrorUtils.scala:83)
at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:97)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:719)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1144)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:642)
at java.base/java.lang.Thread.run(Thread.java:1583)
Caused by: java.io.IOException: Failed to connect to runnervmymu0l.lcg2avo1p2berav5sjxbnkio3f.bx.internal.cloudapp.net/10.1.0.153:46807
at org.apache.spark.network.client.TransportClientFactory.createClient(TransportClientFactory.java:304)
at org.apache.spark.network.client.TransportClientFactory.createClient(TransportClientFactory.java:224)
at org.apache.spark.network.client.TransportClientFactory.createClient(TransportClientFactory.java:236)
at org.apache.spark.rpc.netty.NettyRpcEnv.downloadClient(NettyRpcEnv.scala:406)
at org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$openChannel$4(NettyRpcEnv.scala:370)
at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18)
at org.apache.spark.util.Utils$.tryWithSafeFinallyAndFailureCallbacks(Utils.scala:1337)
at org.apache.spark.rpc.netty.NettyRpcEnv.openChannel(NettyRpcEnv.scala:369)
at org.apache.spark.executor.ExecutorClassLoader.getClassFileInputStreamFromSparkRPC(ExecutorClassLoader.scala:132)
at org.apache.spark.executor.ExecutorClassLoader.$anonfun$fetchFn$1(ExecutorClassLoader.scala:63)
at org.apache.spark.executor.ExecutorClassLoader.findClassLocally(ExecutorClassLoader.scala:173)
at org.apache.spark.executor.ExecutorClassLoader.findClass(ExecutorClassLoader.scala:110)
... 93 more
Reactions are currently unavailable
Metadata
Assignees
Labels
No labels