Created
October 30, 2021 00:04
-
-
Save c21/828b782ee81827f4148939cb50314a7b to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
== Subtree 3 / 3 (maxMethodCodeSize:687; maxConstantPoolSize:225(0.34% used); numInnerClasses:0) ==
*(3) ShuffledHashJoin [k1#2L], [(k2#6L % 3)], FullOuter, BuildRight, NOT ((k1#2L + 3) = k2#6L)
:- Exchange hashpartitioning(k1#2L, 5), ENSURE_REQUIREMENTS, [id=#208]
:  +- *(1) Project [id#0L AS k1#2L]
:     +- *(1) Range (0, 5, step=1, splits=2)
+- Exchange hashpartitioning((k2#6L % 3), 5), ENSURE_REQUIREMENTS, [id=#211]
   +- *(2) Project [id#4L AS k2#6L]
      +- *(2) Range (0, 10, step=1, splits=2)
Generated code:
/* 001 */ public Object generate(Object[] references) { | |
/* 002 */ return new GeneratedIteratorForCodegenStage3(references); | |
/* 003 */ } | |
/* 004 */ | |
/* 005 */ // codegenStageId=3
// Whole-stage-codegen output for the FullOuter ShuffledHashJoin in the plan
// above: the streamed side is the left input (k1), the build side is the
// right input hashed on (k2 % 3), with the extra join condition
// NOT ((k1 + 3) = k2). Output schema is (k1 nullable, k2 nullable).
/* 006 */ final class GeneratedIteratorForCodegenStage3 extends org.apache.spark.sql.execution.BufferedRowIterator {
/* 007 */ private Object[] references;
/* 008 */ private scala.collection.Iterator[] inputs;
/* 009 */ private org.apache.spark.sql.execution.joins.HashedRelation shj_relation_0;
/* 010 */ private boolean shj_keyIsUnique_0;
/* 011 */ private scala.collection.Iterator shj_streamedInput_0;
/* 012 */ private scala.collection.Iterator shj_buildInput_0;
/* 013 */ private InternalRow shj_streamedRow_0;
/* 014 */ private InternalRow shj_buildRow_0;
// Matched-tracking state used to emit the build-side rows that found no
// streamed match (the "right outer" half of the full outer join):
// - matchedKeySet: used when keys are unique — one bit per key index.
// - matchedRowSet: used when keys are NOT unique — set of packed
//   (keyIndex, valueIndex) longs identifying individual matched rows.
/* 015 */ private org.apache.spark.util.collection.BitSet wholestagecodegen_matchedKeySet_0;
/* 016 */ private org.apache.spark.util.collection.OpenHashSet wholestagecodegen_matchedRowSet_0;
/* 017 */ private int wholestagecodegen_prevKeyIndex_0;
/* 018 */ private int wholestagecodegen_valueIndex_0;
// [0]: 1-column writer for the streamed-side join key; [1]: 2-column writer
// for the output row (k1, k2).
/* 019 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] shj_mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[2];
/* 020 */
/* 021 */ public GeneratedIteratorForCodegenStage3(Object[] references) {
/* 022 */ this.references = references;
/* 023 */ }
/* 024 */
// Builds the hash relation from inputs[1] (build side) and wires up the
// streamed input and the build-side value iterator used in phase 2.
/* 025 */ public void init(int index, scala.collection.Iterator[] inputs) {
/* 026 */ partitionIndex = index;
/* 027 */ this.inputs = inputs;
/* 028 */ shj_relation_0 = ((org.apache.spark.sql.execution.joins.ShuffledHashJoinExec) references[0] /* plan */).buildHashedRelation(inputs[1]);
/* 029 */ shj_keyIsUnique_0 = shj_relation_0.keyIsUnique();
/* 030 */ shj_streamedInput_0 = inputs[0];
/* 031 */ shj_buildInput_0 = shj_relation_0.valuesWithKeyIndex();
/* 032 */
/* 033 */ shj_mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0);
/* 034 */ shj_mutableStateArray_0[1] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(2, 0);
/* 035 */ wholestagecodegen_matchedKeySet_0 = new org.apache.spark.util.collection.BitSet(shj_relation_0.maxNumKeysIndex());
/* 036 */ wholestagecodegen_matchedRowSet_0 = new org.apache.spark.util.collection.OpenHashSet(scala.reflect.ClassTag$.MODULE$.Long());
// -1 sentinels: no build-side key/value visited yet in phase 2.
/* 037 */ wholestagecodegen_prevKeyIndex_0 = -1;
/* 038 */ wholestagecodegen_valueIndex_0 = -1;
/* 039 */
/* 040 */ }
/* 041 */
// Emits one output row built from whichever of shj_streamedRow_0 /
// shj_buildRow_0 is currently set; a null side produces a NULL column,
// which is how the outer (non-matched) rows get their null padding.
/* 042 */ private void shj_consumeFullOuterJoinRow_0() {
/* 043 */ ((org.apache.spark.sql.execution.metric.SQLMetric) references[1] /* numOutputRows */).add(1);
/* 044 */
/* 045 */ boolean shj_isNull_10 = true;
/* 046 */ long shj_value_10 = -1L;
/* 047 */ if (shj_streamedRow_0 != null) {
/* 048 */ long shj_value_9 = shj_streamedRow_0.getLong(0);
/* 049 */ shj_isNull_10 = false;
/* 050 */ shj_value_10 = shj_value_9;
/* 051 */ }
/* 052 */ boolean shj_isNull_12 = true;
/* 053 */ long shj_value_12 = -1L;
/* 054 */ if (shj_buildRow_0 != null) {
/* 055 */ long shj_value_11 = shj_buildRow_0.getLong(0);
/* 056 */ shj_isNull_12 = false;
/* 057 */ shj_value_12 = shj_value_11;
/* 058 */ }
/* 059 */ shj_mutableStateArray_0[1].reset();
/* 060 */
/* 061 */ shj_mutableStateArray_0[1].zeroOutNullBytes();
/* 062 */
/* 063 */ if (shj_isNull_10) {
/* 064 */ shj_mutableStateArray_0[1].setNullAt(0);
/* 065 */ } else {
/* 066 */ shj_mutableStateArray_0[1].write(0, shj_value_10);
/* 067 */ }
/* 068 */
/* 069 */ if (shj_isNull_12) {
/* 070 */ shj_mutableStateArray_0[1].setNullAt(1);
/* 071 */ } else {
/* 072 */ shj_mutableStateArray_0[1].write(1, shj_value_12);
/* 073 */ }
// copy() because the writer's row buffer is reused for the next output row.
/* 074 */ append((shj_mutableStateArray_0[1].getRow()).copy());
/* 075 */
/* 076 */ }
/* 077 */
// Two phases in both branches: (1) drain the streamed side, emitting matched
// pairs and stream-only rows while recording which build rows matched;
// (2) drain the build side, emitting only rows never marked as matched.
// The unique-key branch tracks matches per key; the non-unique branch must
// track matches per individual (keyIndex, valueIndex) row.
/* 078 */ protected void processNext() throws java.io.IOException {
/* 079 */ if (shj_keyIsUnique_0) {
/* 080 */ while (shj_streamedInput_0.hasNext()) {
/* 081 */ shj_streamedRow_0 = (InternalRow) shj_streamedInput_0.next();
/* 082 */
/* 083 */ // generate join key for stream side
/* 084 */
/* 085 */ long shj_value_0 = shj_streamedRow_0.getLong(0);
/* 086 */
/* 087 */ shj_mutableStateArray_0[0].reset();
/* 088 */
/* 089 */ shj_mutableStateArray_0[0].write(0, shj_value_0);
/* 090 */
/* 091 */ // find matches from HashedRelation
/* 092 */ boolean wholestagecodegen_foundMatch_0 = false;
/* 093 */ shj_buildRow_0 = null;
// A null join key never matches (SQL null semantics), hence the anyNull()
// short-circuit before probing the relation.
/* 094 */ org.apache.spark.sql.execution.joins.ValueRowWithKeyIndex wholestagecodegen_rowWithIndex_0 = (shj_mutableStateArray_0[0].getRow()).anyNull() ? null:
/* 095 */ shj_relation_0.getValueWithKeyIndex((shj_mutableStateArray_0[0].getRow()));
/* 096 */
/* 097 */ if (wholestagecodegen_rowWithIndex_0 != null) {
/* 098 */ shj_buildRow_0 = wholestagecodegen_rowWithIndex_0.getValue();
/* 099 */ // check join condition
/* 100 */
/* 101 */ long shj_value_2 = shj_buildRow_0.getLong(0);
/* 102 */
/* 103 */ long shj_value_5 = -1L;
/* 104 */
// Evaluates NOT ((k1 + 3) = k2); the "false ||" below is codegen's
// null-tracking residue (the condition is non-nullable here).
/* 105 */ shj_value_5 = shj_value_0 + 3L;
/* 106 */
/* 107 */ boolean shj_value_4 = false;
/* 108 */ shj_value_4 = shj_value_5 == shj_value_2;
/* 109 */ boolean shj_value_3 = false;
/* 110 */ shj_value_3 = !(shj_value_4);
/* 111 */ if (!(false || !shj_value_3))
/* 112 */ {
/* 113 */ // set key index in matched keys set
/* 114 */ wholestagecodegen_matchedKeySet_0.set(wholestagecodegen_rowWithIndex_0.getKeyIndex());
/* 115 */ wholestagecodegen_foundMatch_0 = true;
/* 116 */ }
/* 117 */
// Condition failed: fall through with a null build row so the streamed
// row is still emitted (left-outer half) with a NULL k2.
/* 118 */ if (!wholestagecodegen_foundMatch_0) {
/* 119 */ shj_buildRow_0 = null;
/* 120 */ }
/* 121 */ }
/* 122 */
/* 123 */ shj_consumeFullOuterJoinRow_0();
/* 124 */ if (shouldStop()) return;
/* 125 */ }
/* 126 */
// Phase 2 (unique keys): streamed side exhausted; null it out so emitted
// rows get NULL k1.
/* 127 */ shj_streamedRow_0 = null;
/* 128 */
/* 129 */ // find non-matched rows from HashedRelation
/* 130 */ while (shj_buildInput_0.hasNext()) {
/* 131 */ org.apache.spark.sql.execution.joins.ValueRowWithKeyIndex wholestagecodegen_rowWithIndex_0 = (org.apache.spark.sql.execution.joins.ValueRowWithKeyIndex) shj_buildInput_0.next();
/* 132 */
/* 133 */ // check if key index is not in matched keys set
/* 134 */ if (!wholestagecodegen_matchedKeySet_0.get(wholestagecodegen_rowWithIndex_0.getKeyIndex())) {
/* 135 */ shj_buildRow_0 = wholestagecodegen_rowWithIndex_0.getValue();
/* 136 */ shj_consumeFullOuterJoinRow_0();
/* 137 */ }
/* 138 */
/* 139 */ if (shouldStop()) return;
/* 140 */ }
/* 141 */
/* 142 */ } else {
// Non-unique-key branch: each streamed row may match several build rows.
/* 143 */ while (shj_streamedInput_0.hasNext()) {
/* 144 */ shj_streamedRow_0 = (InternalRow) shj_streamedInput_0.next();
/* 145 */
/* 146 */ // generate join key for stream side
/* 147 */
/* 148 */ long shj_value_0 = shj_streamedRow_0.getLong(0);
/* 149 */
/* 150 */ shj_mutableStateArray_0[0].reset();
/* 151 */
/* 152 */ shj_mutableStateArray_0[0].write(0, shj_value_0);
/* 153 */
/* 154 */ // find matches from HashedRelation
/* 155 */ boolean wholestagecodegen_foundMatch_1 = false;
/* 156 */ shj_buildRow_0 = null;
/* 157 */ scala.collection.Iterator wholestagecodegen_buildIterator_0 = (shj_mutableStateArray_0[0].getRow()).anyNull() ? null:
/* 158 */ shj_relation_0.getWithKeyIndex((shj_mutableStateArray_0[0].getRow()));
/* 159 */
// Local valueIndex shadows the field: it numbers the build rows for THIS
// key during the probe, 0-based after the first increment below.
/* 160 */ int wholestagecodegen_valueIndex_0 = -1;
/* 161 */ while (wholestagecodegen_buildIterator_0 != null && wholestagecodegen_buildIterator_0.hasNext()) {
/* 162 */ org.apache.spark.sql.execution.joins.ValueRowWithKeyIndex wholestagecodegen_rowWithIndex_1 = (org.apache.spark.sql.execution.joins.ValueRowWithKeyIndex) wholestagecodegen_buildIterator_0.next();
/* 163 */ int wholestagecodegen_keyIndex_0 = wholestagecodegen_rowWithIndex_1.getKeyIndex();
/* 164 */ shj_buildRow_0 = wholestagecodegen_rowWithIndex_1.getValue();
/* 165 */ wholestagecodegen_valueIndex_0++;
/* 166 */
/* 167 */ // check join condition
/* 168 */
/* 169 */ long shj_value_2 = shj_buildRow_0.getLong(0);
/* 170 */
/* 171 */ long shj_value_5 = -1L;
/* 172 */
/* 173 */ shj_value_5 = shj_value_0 + 3L;
/* 174 */
/* 175 */ boolean shj_value_4 = false;
/* 176 */ shj_value_4 = shj_value_5 == shj_value_2;
/* 177 */ boolean shj_value_3 = false;
/* 178 */ shj_value_3 = !(shj_value_4);
/* 179 */ if (!(false || !shj_value_3))
/* 180 */ {
/* 181 */ // set row index in matched row set
// Packs keyIndex into the high 32 bits and valueIndex into the low 32.
// NOTE(review): this encoding assumes both indexes are non-negative
// (valueIndex is >= 0 here); a negative valueIndex would sign-extend
// and corrupt the key — appears impossible in this code path.
/* 182 */ wholestagecodegen_matchedRowSet_0.add((((long)wholestagecodegen_keyIndex_0) << 32) | wholestagecodegen_valueIndex_0);
/* 183 */ wholestagecodegen_foundMatch_1 = true;
/* 184 */ shj_consumeFullOuterJoinRow_0();
/* 185 */ }
/* 186 */ }
/* 187 */
// No build row satisfied the condition: emit the streamed row with NULL k2.
/* 188 */ if (!wholestagecodegen_foundMatch_1) {
/* 189 */ shj_buildRow_0 = null;
/* 190 */ shj_consumeFullOuterJoinRow_0();
/* 191 */ }
/* 192 */
/* 193 */ if (shouldStop()) return;
/* 194 */ }
/* 195 */
/* 196 */ shj_streamedRow_0 = null;
/* 197 */
/* 198 */ // find non-matched rows from HashedRelation
// Phase 2 (non-unique keys): re-derive each row's valueIndex while scanning
// valuesWithKeyIndex(), by resetting the counter whenever the key index
// changes. NOTE(review): this relies on valuesWithKeyIndex() returning rows
// grouped by key index, in the same per-key order as the probe iterator —
// a contract of HashedRelation, not visible in this file.
/* 199 */ while (shj_buildInput_0.hasNext()) {
/* 200 */ org.apache.spark.sql.execution.joins.ValueRowWithKeyIndex wholestagecodegen_rowWithIndex_1 = (org.apache.spark.sql.execution.joins.ValueRowWithKeyIndex) shj_buildInput_0.next();
/* 201 */ int wholestagecodegen_keyIndex_0 = wholestagecodegen_rowWithIndex_1.getKeyIndex();
/* 202 */ if (wholestagecodegen_prevKeyIndex_0 == -1 || wholestagecodegen_keyIndex_0 != wholestagecodegen_prevKeyIndex_0) {
/* 203 */ wholestagecodegen_valueIndex_0 = 0;
/* 204 */ wholestagecodegen_prevKeyIndex_0 = wholestagecodegen_keyIndex_0;
/* 205 */ } else {
/* 206 */ wholestagecodegen_valueIndex_0 += 1;
/* 207 */ }
/* 208 */
/* 209 */ // check if row index is not in matched row set
/* 210 */ if (!wholestagecodegen_matchedRowSet_0.contains((((long)wholestagecodegen_keyIndex_0) << 32) | wholestagecodegen_valueIndex_0)) {
/* 211 */ shj_buildRow_0 = wholestagecodegen_rowWithIndex_1.getValue();
/* 212 */ shj_consumeFullOuterJoinRow_0();
/* 213 */ }
/* 214 */
/* 215 */ if (shouldStop()) return;
/* 216 */ }
/* 217 */
/* 218 */ }
/* 219 */ }
/* 220 */
/* 221 */ }
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment