Last active
May 8, 2018 06:53
-
-
Save howie/440b9f5436a9e9bbd125be09fdbc7b03 to your computer and use it in GitHub Desktop.
Spark whole-stage CodeGenerator error: the generated `processNext()` references variables that are never declared (`scan_isNull`, `scan_value`, `scan_isNull1`, `scan_value1`, `bhj_isNull3`, `bhj_value3`), so Janino rejects the generated class with `Expression "scan_isNull" is not an rvalue` at generated line 118 (triggered via a broadcast hash join under a LocalLimit, see stack trace below).
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
/* 001 */ public Object generate(Object[] references) { | |
/* 002 */ return new GeneratedIteratorForCodegenStage2(references); | |
/* 003 */ } | |
/* 004 */ | |
/* 005 */ final class GeneratedIteratorForCodegenStage2 extends org.apache.spark.sql.execution.BufferedRowIterator { | |
/* 006 */ private Object[] references; | |
/* 007 */ private scala.collection.Iterator[] inputs; | |
/* 008 */ private org.apache.spark.sql.execution.joins.UnsafeHashedRelation bhj_relation; | |
/* 009 */ private boolean locallimit_stopEarly; | |
/* 010 */ private int locallimit_count; | |
/* 011 */ private org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder[] bhj_mutableStateArray1 = new org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder[4]; | |
/* 012 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] bhj_mutableStateArray2 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[4]; | |
/* 013 */ private UnsafeRow[] bhj_mutableStateArray = new UnsafeRow[4]; | |
/* 014 */ private scala.collection.Iterator[] scan_mutableStateArray = new scala.collection.Iterator[1]; | |
/* 015 */ | |
/* 016 */ public GeneratedIteratorForCodegenStage2(Object[] references) { | |
/* 017 */ this.references = references; | |
/* 018 */ } | |
/* 019 */ | |
/* 020 */ public void init(int index, scala.collection.Iterator[] inputs) { | |
/* 021 */ partitionIndex = index; | |
/* 022 */ this.inputs = inputs; | |
/* 023 */ wholestagecodegen_init_0(); | |
/* 024 */ wholestagecodegen_init_1(); | |
/* 025 */ | |
/* 026 */ } | |
/* 027 */ | |
/* 028 */ private void wholestagecodegen_init_0() { | |
/* 029 */ scan_mutableStateArray[0] = inputs[0]; | |
/* 030 */ | |
/* 031 */ bhj_relation = ((org.apache.spark.sql.execution.joins.UnsafeHashedRelation) ((org.apache.spark.broadcast.TorrentBroadcast) references[1] /* broadcast */).value()).asReadOnlyCopy(); | |
/* 032 */ incPeakExecutionMemory(bhj_relation.estimatedSize()); | |
/* 033 */ | |
/* 034 */ org.apache.spark.TaskContext$.MODULE$.get().addTaskCompletionListener(new org.apache.spark.util.TaskCompletionListener() { | |
/* 035 */ @Override | |
/* 036 */ public void onTaskCompletion(org.apache.spark.TaskContext context) { | |
/* 037 */ ((org.apache.spark.sql.execution.metric.SQLMetric) references[2] /* avgHashProbe */).set(bhj_relation.getAverageProbesPerLookup()); | |
/* 038 */ } | |
/* 039 */ }); | |
/* 040 */ | |
/* 041 */ bhj_mutableStateArray[0] = new UnsafeRow(2); | |
/* 042 */ bhj_mutableStateArray1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder(bhj_mutableStateArray[0], 64); | |
/* 043 */ bhj_mutableStateArray2[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(bhj_mutableStateArray1[0], 2); | |
/* 044 */ bhj_mutableStateArray[1] = new UnsafeRow(7); | |
/* 045 */ bhj_mutableStateArray1[1] = new org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder(bhj_mutableStateArray[1], 224); | |
/* 046 */ | |
/* 047 */ } | |
/* 048 */ | |
/* 049 */ private void wholestagecodegen_init_1() { | |
/* 050 */ bhj_mutableStateArray2[1] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(bhj_mutableStateArray1[1], 7); | |
/* 051 */ bhj_mutableStateArray[2] = new UnsafeRow(5); | |
/* 052 */ bhj_mutableStateArray1[2] = new org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder(bhj_mutableStateArray[2], 160); | |
/* 053 */ bhj_mutableStateArray2[2] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(bhj_mutableStateArray1[2], 5); | |
/* 054 */ | |
/* 055 */ bhj_mutableStateArray[3] = new UnsafeRow(5); | |
/* 056 */ bhj_mutableStateArray1[3] = new org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder(bhj_mutableStateArray[3], 160); | |
/* 057 */ bhj_mutableStateArray2[3] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(bhj_mutableStateArray1[3], 5); | |
/* 058 */ | |
/* 059 */ } | |
/* 060 */ | |
/* 061 */ protected void processNext() throws java.io.IOException { | |
/* 062 */ while (scan_mutableStateArray[0].hasNext()) { | |
/* 063 */ InternalRow scan_row = (InternalRow) scan_mutableStateArray[0].next(); | |
/* 064 */ ((org.apache.spark.sql.execution.metric.SQLMetric) references[0] /* numOutputRows */).add(1); | |
/* 065 */ boolean scan_isNull2 = scan_row.isNullAt(2); | |
/* 066 */ UTF8String scan_value2 = scan_isNull2 ? null : (scan_row.getUTF8String(2)); | |
/* 067 */ boolean scan_isNull3 = scan_row.isNullAt(3); | |
/* 068 */ UTF8String scan_value3 = scan_isNull3 ? null : (scan_row.getUTF8String(3)); | |
/* 069 */ | |
/* 070 */ // generate join key for stream side | |
/* 071 */ | |
/* 072 */ bhj_mutableStateArray1[0].reset(); | |
/* 073 */ | |
/* 074 */ bhj_mutableStateArray2[0].zeroOutNullBytes(); | |
/* 075 */ | |
/* 076 */ if (scan_isNull3) { | |
/* 077 */ bhj_mutableStateArray2[0].setNullAt(0); | |
/* 078 */ } else { | |
/* 079 */ bhj_mutableStateArray2[0].write(0, scan_value3); | |
/* 080 */ } | |
/* 081 */ | |
/* 082 */ if (scan_isNull2) { | |
/* 083 */ bhj_mutableStateArray2[0].setNullAt(1); | |
/* 084 */ } else { | |
/* 085 */ bhj_mutableStateArray2[0].write(1, scan_value2); | |
/* 086 */ } | |
/* 087 */ bhj_mutableStateArray[0].setTotalSize(bhj_mutableStateArray1[0].totalSize()); | |
/* 088 */ | |
/* 089 */ // find matches from HashedRelation | |
/* 090 */ UnsafeRow bhj_matched = bhj_mutableStateArray[0].anyNull() ? null: (UnsafeRow)bhj_relation.getValue(bhj_mutableStateArray[0]); | |
/* 091 */ final boolean bhj_conditionPassed = true; | |
/* 092 */ if (!bhj_conditionPassed) { | |
/* 093 */ bhj_matched = null; | |
/* 094 */ // reset the variables those are already evaluated. | |
/* 095 */ | |
/* 096 */ } | |
/* 097 */ ((org.apache.spark.sql.execution.metric.SQLMetric) references[3] /* numOutputRows */).add(1); | |
/* 098 */ | |
/* 099 */ if (locallimit_count < 101) { | |
/* 100 */ locallimit_count += 1; | |
/* 101 */ | |
/* 102 */ bhj_mutableStateArray1[3].reset(); | |
/* 103 */ | |
/* 104 */ bhj_mutableStateArray2[3].zeroOutNullBytes(); | |
/* 105 */ | |
/* 106 */ if (scan_isNull3) { | |
/* 107 */ bhj_mutableStateArray2[3].setNullAt(0); | |
/* 108 */ } else { | |
/* 109 */ bhj_mutableStateArray2[3].write(0, scan_value3); | |
/* 110 */ } | |
/* 111 */ | |
/* 112 */ if (scan_isNull2) { | |
/* 113 */ bhj_mutableStateArray2[3].setNullAt(1); | |
/* 114 */ } else { | |
/* 115 */ bhj_mutableStateArray2[3].write(1, scan_value2); | |
/* 116 */ } | |
/* 117 */ | |
/* 118 */ if (scan_isNull) { | |
/* 119 */ bhj_mutableStateArray2[3].setNullAt(2); | |
/* 120 */ } else { | |
/* 121 */ bhj_mutableStateArray2[3].write(2, scan_value); | |
/* 122 */ } | |
/* 123 */ | |
/* 124 */ if (scan_isNull1) { | |
/* 125 */ bhj_mutableStateArray2[3].setNullAt(3); | |
/* 126 */ } else { | |
/* 127 */ bhj_mutableStateArray2[3].write(3, scan_value1); | |
/* 128 */ } | |
/* 129 */ | |
/* 130 */ if (bhj_isNull3) { | |
/* 131 */ bhj_mutableStateArray2[3].setNullAt(4); | |
/* 132 */ } else { | |
/* 133 */ bhj_mutableStateArray2[3].write(4, bhj_value3); | |
/* 134 */ } | |
/* 135 */ bhj_mutableStateArray[3].setTotalSize(bhj_mutableStateArray1[3].totalSize()); | |
/* 136 */ append(bhj_mutableStateArray[3]); | |
/* 137 */ | |
/* 138 */ } else { | |
/* 139 */ locallimit_stopEarly = true; | |
/* 140 */ } | |
/* 141 */ if (shouldStop()) return; | |
/* 142 */ } | |
/* 143 */ } | |
/* 144 */ | |
/* 145 */ @Override | |
/* 146 */ protected boolean stopEarly() { | |
/* 147 */ return locallimit_stopEarly; | |
/* 148 */ } | |
/* 149 */ | |
/* 150 */ } | |
[2018-05-08 14:50:24,426][ERROR] CodeGenerator : failed to compile: org.codehaus.commons.compiler.CompileException: File 'generated.java', Line 118, Column 16: Expression "scan_isNull" is not an rvalue | |
org.codehaus.commons.compiler.CompileException: File 'generated.java', Line 118, Column 16: Expression "scan_isNull" is not an rvalue | |
at org.codehaus.janino.UnitCompiler.compileError(UnitCompiler.java:11821) | |
at org.codehaus.janino.UnitCompiler.toRvalueOrCompileException(UnitCompiler.java:7170) | |
at org.codehaus.janino.UnitCompiler.getConstantValue2(UnitCompiler.java:5332) | |
at org.codehaus.janino.UnitCompiler.access$9400(UnitCompiler.java:212) | |
at org.codehaus.janino.UnitCompiler$13$1.visitAmbiguousName(UnitCompiler.java:5287) | |
at org.codehaus.janino.Java$AmbiguousName.accept(Java.java:4053) | |
at org.codehaus.janino.UnitCompiler$13.visitLvalue(UnitCompiler.java:5284) | |
at org.codehaus.janino.Java$Lvalue.accept(Java.java:3977) | |
at org.codehaus.janino.UnitCompiler.getConstantValue(UnitCompiler.java:5280) | |
at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:2391) | |
at org.codehaus.janino.UnitCompiler.access$1900(UnitCompiler.java:212) | |
at org.codehaus.janino.UnitCompiler$6.visitIfStatement(UnitCompiler.java:1474) | |
at org.codehaus.janino.UnitCompiler$6.visitIfStatement(UnitCompiler.java:1466) | |
at org.codehaus.janino.Java$IfStatement.accept(Java.java:2926) | |
at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:1466) | |
at org.codehaus.janino.UnitCompiler.compileStatements(UnitCompiler.java:1546) | |
at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:1532) | |
at org.codehaus.janino.UnitCompiler.access$1700(UnitCompiler.java:212) | |
at org.codehaus.janino.UnitCompiler$6.visitBlock(UnitCompiler.java:1472) | |
at org.codehaus.janino.UnitCompiler$6.visitBlock(UnitCompiler.java:1466) | |
at org.codehaus.janino.Java$Block.accept(Java.java:2756) | |
at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:1466) | |
at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:2444) | |
at org.codehaus.janino.UnitCompiler.access$1900(UnitCompiler.java:212) | |
at org.codehaus.janino.UnitCompiler$6.visitIfStatement(UnitCompiler.java:1474) | |
at org.codehaus.janino.UnitCompiler$6.visitIfStatement(UnitCompiler.java:1466) | |
at org.codehaus.janino.Java$IfStatement.accept(Java.java:2926) | |
at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:1466) | |
at org.codehaus.janino.UnitCompiler.compileStatements(UnitCompiler.java:1546) | |
at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:1532) | |
at org.codehaus.janino.UnitCompiler.access$1700(UnitCompiler.java:212) | |
at org.codehaus.janino.UnitCompiler$6.visitBlock(UnitCompiler.java:1472) | |
at org.codehaus.janino.UnitCompiler$6.visitBlock(UnitCompiler.java:1466) | |
at org.codehaus.janino.Java$Block.accept(Java.java:2756) | |
at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:1466) | |
at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:1821) | |
at org.codehaus.janino.UnitCompiler.access$2200(UnitCompiler.java:212) | |
at org.codehaus.janino.UnitCompiler$6.visitWhileStatement(UnitCompiler.java:1477) | |
at org.codehaus.janino.UnitCompiler$6.visitWhileStatement(UnitCompiler.java:1466) | |
at org.codehaus.janino.Java$WhileStatement.accept(Java.java:3031) | |
at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:1466) | |
at org.codehaus.janino.UnitCompiler.compileStatements(UnitCompiler.java:1546) | |
at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:3075) | |
at org.codehaus.janino.UnitCompiler.compileDeclaredMethods(UnitCompiler.java:1336) | |
at org.codehaus.janino.UnitCompiler.compileDeclaredMethods(UnitCompiler.java:1309) | |
at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:799) | |
at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:958) | |
at org.codehaus.janino.UnitCompiler.access$700(UnitCompiler.java:212) | |
at org.codehaus.janino.UnitCompiler$2.visitMemberClassDeclaration(UnitCompiler.java:393) | |
at org.codehaus.janino.UnitCompiler$2.visitMemberClassDeclaration(UnitCompiler.java:385) | |
at org.codehaus.janino.Java$MemberClassDeclaration.accept(Java.java:1286) | |
at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:385) | |
at org.codehaus.janino.UnitCompiler.compileDeclaredMemberTypes(UnitCompiler.java:1285) | |
at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:825) | |
at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:411) | |
at org.codehaus.janino.UnitCompiler.access$400(UnitCompiler.java:212) | |
at org.codehaus.janino.UnitCompiler$2.visitPackageMemberClassDeclaration(UnitCompiler.java:390) | |
at org.codehaus.janino.UnitCompiler$2.visitPackageMemberClassDeclaration(UnitCompiler.java:385) | |
at org.codehaus.janino.Java$PackageMemberClassDeclaration.accept(Java.java:1405) | |
at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:385) | |
at org.codehaus.janino.UnitCompiler.compileUnit(UnitCompiler.java:357) | |
at org.codehaus.janino.SimpleCompiler.cook(SimpleCompiler.java:234) | |
at org.codehaus.janino.SimpleCompiler.compileToClassLoader(SimpleCompiler.java:446) | |
at org.codehaus.janino.ClassBodyEvaluator.compileToClass(ClassBodyEvaluator.java:313) | |
at org.codehaus.janino.ClassBodyEvaluator.cook(ClassBodyEvaluator.java:235) | |
at org.codehaus.janino.SimpleCompiler.cook(SimpleCompiler.java:204) | |
at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:80) | |
at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$.org$apache$spark$sql$catalyst$expressions$codegen$CodeGenerator$$doCompile(CodeGenerator.scala:1421) | |
at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:1497) | |
at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:1494) | |
at org.spark_project.guava.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3599) | |
at org.spark_project.guava.cache.LocalCache$Segment.loadSync(LocalCache.java:2379) | |
at org.spark_project.guava.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2342) | |
at org.spark_project.guava.cache.LocalCache$Segment.get(LocalCache.java:2257) | |
at org.spark_project.guava.cache.LocalCache.get(LocalCache.java:4000) | |
at org.spark_project.guava.cache.LocalCache.getOrLoad(LocalCache.java:4004) | |
at org.spark_project.guava.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4874) | |
at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$.compile(CodeGenerator.scala:1369) | |
at org.apache.spark.sql.execution.WholeStageCodegenExec.liftedTree1$1(WholeStageCodegenExec.scala:579) | |
at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:578) | |
at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131) | |
at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127) | |
at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155) | |
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151) | |
at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152) | |
at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127) | |
at org.apache.spark.sql.execution.SparkPlan.getByteArrayRdd(SparkPlan.scala:247) | |
at org.apache.spark.sql.execution.SparkPlan.executeTake(SparkPlan.scala:337) | |
at org.apache.spark.sql.execution.CollectLimitExec.executeCollect(limit.scala:38) | |
at org.apache.spark.sql.Dataset.org$apache$spark$sql$Dataset$$collectFromPlan(Dataset.scala:3272) | |
at org.apache.spark.sql.Dataset$$anonfun$head$1.apply(Dataset.scala:2484) | |
at org.apache.spark.sql.Dataset$$anonfun$head$1.apply(Dataset.scala:2484) | |
at org.apache.spark.sql.Dataset$$anonfun$52.apply(Dataset.scala:3253) | |
at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:77) | |
at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3252) | |
at org.apache.spark.sql.Dataset.head(Dataset.scala:2484) | |
at org.apache.spark.sql.Dataset.take(Dataset.scala:2698) | |
at org.apache.spark.sql.Dataset.showString(Dataset.scala:254) | |
at org.apache.spark.sql.Dataset.show(Dataset.scala:723) | |
at org.apache.spark.sql.Dataset.show(Dataset.scala:682) | |
at tw.howie.spark.CodeGenErrorTest.testCodeGenErrorByJoin(CodeGenErrorTest.java:110) | |
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) | |
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) | |
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) | |
at java.lang.reflect.Method.invoke(Method.java:498) | |
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50) | |
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) | |
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47) | |
at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) | |
at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) | |
at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27) | |
at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:325) | |
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:78) | |
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:57) | |
at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290) | |
at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:71) | |
at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:288) | |
at org.junit.runners.ParentRunner.access$000(ParentRunner.java:58) | |
at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:268) | |
at org.junit.runners.ParentRunner.run(ParentRunner.java:363) | |
at org.junit.runner.JUnitCore.run(JUnitCore.java:137) | |
at com.intellij.junit4.JUnit4IdeaTestRunner.startRunnerWithArgs(JUnit4IdeaTestRunner.java:68) | |
at com.intellij.rt.execution.junit.IdeaTestRunner$Repeater.startRunnerWithArgs(IdeaTestRunner.java:47) | |
at com.intellij.rt.execution.junit.JUnitStarter.prepareStreamsAndStart(JUnitStarter.java:242) | |
at com.intellij.rt.execution.junit.JUnitStarter.main(JUnitStarter.java:70) |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment