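The dumps below are whole-stage codegen output from Spark SQL: Java source that Catalyst generates at runtime and compiles with Janino. They trace a simple (word string, count int) projection through several plan shapes, with and without a Python UDF, and with row-based versus columnar (vectorized) scans. As a minimal, hypothetical sketch of how such dumps can be produced (the data and query are assumptions inferred from the generated code, not part of the original gist):

# PySpark sketch; data and query shape are assumptions inferred from the
# generated code and the debug logs further below.
from pyspark.sql import SparkSession
from pyspark.sql.functions import col

spark = (SparkSession.builder
         .master("local[*]")
         .appName("codegen-dump-demo")
         .getOrCreate())

# DEBUG logging makes the Generate*Projection code generators log the Java
# they emit; the "DEBUG GenerateMutableProjection: code for ..." lines in
# the last dump come from exactly this kind of logging.
spark.sparkContext.setLogLevel("DEBUG")

# Schema matching the word#0 / count#1 attributes in the debug log below.
df = spark.createDataFrame([("spark", 1), ("codegen", 2)], ["word", "count"])

# A built-in projection: the first dump's stage 1 fuses the scan with
# "count + 1" in a single generated class.
df.select("word", (col("count") + 1).alias("count")).collect()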
// codegenStageId=1
final class GeneratedIteratorForCodegenStage1 extends org.apache.spark.sql.execution.BufferedRowIterator {
  private Object[] references;
  private scala.collection.Iterator[] inputs;
  private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] project_mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1];
  private scala.collection.Iterator[] batchscan_mutableStateArray_0 = new scala.collection.Iterator[1];

  public GeneratedIteratorForCodegenStage1(Object[] references) {
    this.references = references;
  }

  public void init(int index, scala.collection.Iterator[] inputs) {
    partitionIndex = index;
    this.inputs = inputs;
    batchscan_mutableStateArray_0[0] = inputs[0];
    project_mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(2, 32);
  }

  protected void processNext() throws java.io.IOException {
    while (batchscan_mutableStateArray_0[0].hasNext()) {
      InternalRow batchscan_row_0 = (InternalRow) batchscan_mutableStateArray_0[0].next();
      ((org.apache.spark.sql.execution.metric.SQLMetric) references[0] /* numOutputRows */).add(1);
      boolean batchscan_isNull_0 = batchscan_row_0.isNullAt(0);
      UTF8String batchscan_value_0 = batchscan_isNull_0 ?
        null : (batchscan_row_0.getUTF8String(0));
      boolean project_isNull_1 = true;
      int project_value_1 = -1;
      boolean batchscan_isNull_1 = batchscan_row_0.isNullAt(1);
      int batchscan_value_1 = batchscan_isNull_1 ?
        -1 : (batchscan_row_0.getInt(1));
      if (!batchscan_isNull_1) {
        project_isNull_1 = false; // resultCode could change nullability.
        project_value_1 = batchscan_value_1 + 1;
      }
      project_mutableStateArray_0[0].reset();
      project_mutableStateArray_0[0].zeroOutNullBytes();
      if (batchscan_isNull_0) {
        project_mutableStateArray_0[0].setNullAt(0);
      } else {
        project_mutableStateArray_0[0].write(0, batchscan_value_0);
      }
      if (project_isNull_1) {
        project_mutableStateArray_0[0].setNullAt(1);
      } else {
        project_mutableStateArray_0[0].write(1, project_value_1);
      }
      append((project_mutableStateArray_0[0].getRow()));
      if (shouldStop()) return;
    }
  }
}

// GenerateUnsafeProjection: code for input[0, string, true],input[1, string, true]
public java.lang.Object generate(Object[] references) {
  return new SpecificUnsafeProjection(references);
}

class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection {
  private Object[] references;
  private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1];

  public SpecificUnsafeProjection(Object[] references) {
    this.references = references;
    mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(2, 64);
  }

  public void initialize(int partitionIndex) {
  }

  // Scala.Function1 need this
  public java.lang.Object apply(java.lang.Object row) {
    return apply((InternalRow) row);
  }

  public UnsafeRow apply(InternalRow i) {
    mutableStateArray_0[0].reset();
    mutableStateArray_0[0].zeroOutNullBytes();
    boolean isNull_0 = i.isNullAt(0);
    UTF8String value_0 = isNull_0 ?
      null : (i.getUTF8String(0));
    if (isNull_0) {
      mutableStateArray_0[0].setNullAt(0);
    } else {
      mutableStateArray_0[0].write(0, value_0);
    }
    boolean isNull_1 = i.isNullAt(1);
    UTF8String value_1 = isNull_1 ?
      null : (i.getUTF8String(1));
    if (isNull_1) {
      mutableStateArray_0[0].setNullAt(1);
    } else {
      mutableStateArray_0[0].write(1, value_1);
    }
    return (mutableStateArray_0[0].getRow());
  }
}
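In the dump above, the scan and the count + 1 arithmetic fuse into one codegen stage. The next dump is what the plan looks like when the same increment is done by a row-based Python UDF instead: BatchEvalPythonExec cannot be code-generated, so the plan splits into two whole-stage subtrees around it. A hypothetical continuation of the sketch above (the UDF body is an assumption):

from pyspark.sql.functions import col, udf
from pyspark.sql.types import IntegerType

# A row-based Python UDF forces a BatchEvalPythonExec between two codegen
# stages, matching the two GeneratedIteratorForCodegenStage classes below.
plus_one = udf(lambda x: x + 1 if x is not None else None, IntegerType())

df.select("word", plus_one(col("count")).alias("count")).collect()

In the resulting dump, stage 1 simply forwards (word, count) rows toward the Python runner; the SpecificMutableProjection extracts the UDF's single int input (count#1), the three-field SpecificUnsafeProjection carries (word, count, udf result), and stage 2 projects ordinals 0 and 2 back down to the two output columns.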
// codegenStageId=1
final class GeneratedIteratorForCodegenStage1 extends org.apache.spark.sql.execution.BufferedRowIterator {
  private Object[] references;
  private scala.collection.Iterator[] inputs;
  private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] project_mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1];
  private scala.collection.Iterator[] batchscan_mutableStateArray_0 = new scala.collection.Iterator[1];

  public GeneratedIteratorForCodegenStage1(Object[] references) {
    this.references = references;
  }

  public void init(int index, scala.collection.Iterator[] inputs) {
    partitionIndex = index;
    this.inputs = inputs;
    batchscan_mutableStateArray_0[0] = inputs[0];
    project_mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(2, 32);
  }

  protected void processNext() throws java.io.IOException {
    while (batchscan_mutableStateArray_0[0].hasNext()) {
      InternalRow batchscan_row_0 = (InternalRow) batchscan_mutableStateArray_0[0].next();
      ((org.apache.spark.sql.execution.metric.SQLMetric) references[0] /* numOutputRows */).add(1);
      boolean batchscan_isNull_0 = batchscan_row_0.isNullAt(0);
      UTF8String batchscan_value_0 = batchscan_isNull_0 ?
        null : (batchscan_row_0.getUTF8String(0));
      boolean batchscan_isNull_1 = batchscan_row_0.isNullAt(1);
      int batchscan_value_1 = batchscan_isNull_1 ?
        -1 : (batchscan_row_0.getInt(1));
      project_mutableStateArray_0[0].reset();
      project_mutableStateArray_0[0].zeroOutNullBytes();
      if (batchscan_isNull_0) {
        project_mutableStateArray_0[0].setNullAt(0);
      } else {
        project_mutableStateArray_0[0].write(0, batchscan_value_0);
      }
      if (batchscan_isNull_1) {
        project_mutableStateArray_0[0].setNullAt(1);
      } else {
        project_mutableStateArray_0[0].write(1, batchscan_value_1);
      }
      append((project_mutableStateArray_0[0].getRow()));
      if (shouldStop()) return;
    }
  }
}

// codegenStageId=2
final class GeneratedIteratorForCodegenStage2 extends org.apache.spark.sql.execution.BufferedRowIterator {
  private Object[] references;
  private scala.collection.Iterator[] inputs;
  private scala.collection.Iterator inputadapter_input_0;
  private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] project_mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1];

  public GeneratedIteratorForCodegenStage2(Object[] references) {
    this.references = references;
  }

  public void init(int index, scala.collection.Iterator[] inputs) {
    partitionIndex = index;
    this.inputs = inputs;
    inputadapter_input_0 = inputs[0];
    project_mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(2, 32);
  }

  protected void processNext() throws java.io.IOException {
    while (inputadapter_input_0.hasNext()) {
      InternalRow inputadapter_row_0 = (InternalRow) inputadapter_input_0.next();
      boolean inputadapter_isNull_0 = inputadapter_row_0.isNullAt(0);
      UTF8String inputadapter_value_0 = inputadapter_isNull_0 ?
        null : (inputadapter_row_0.getUTF8String(0));
      boolean inputadapter_isNull_2 = inputadapter_row_0.isNullAt(2);
      int inputadapter_value_2 = inputadapter_isNull_2 ?
        -1 : (inputadapter_row_0.getInt(2));
      project_mutableStateArray_0[0].reset();
      project_mutableStateArray_0[0].zeroOutNullBytes();
      if (inputadapter_isNull_0) {
        project_mutableStateArray_0[0].setNullAt(0);
      } else {
        project_mutableStateArray_0[0].write(0, inputadapter_value_0);
      }
      if (inputadapter_isNull_2) {
        project_mutableStateArray_0[0].setNullAt(1);
      } else {
        project_mutableStateArray_0[0].write(1, inputadapter_value_2);
      }
      append((project_mutableStateArray_0[0].getRow()));
      if (shouldStop()) return;
    }
  }
}

// Projection
class SpecificMutableProjection extends org.apache.spark.sql.catalyst.expressions.codegen.BaseMutableProjection {
  private Object[] references;
  private InternalRow mutableRow;
  private int value_1;
  private boolean isNull_1;

  public SpecificMutableProjection(Object[] references) {
    this.references = references;
    mutableRow = new org.apache.spark.sql.catalyst.expressions.GenericInternalRow(1);
  }

  public void initialize(int partitionIndex) {
  }

  public org.apache.spark.sql.catalyst.expressions.codegen.BaseMutableProjection target(InternalRow row) {
    mutableRow = row;
    return this;
  }

  /* Provide immutable access to the last projected row. */
  public InternalRow currentValue() {
    return (InternalRow) mutableRow;
  }

  public java.lang.Object apply(java.lang.Object _i) {
    InternalRow i = (InternalRow) _i;
    boolean isNull_0 = i.isNullAt(1);
    int value_0 = isNull_0 ?
      -1 : (i.getInt(1));
    isNull_1 = isNull_0;
    value_1 = value_0;
    // copy all the results into MutableRow
    if (!isNull_1) {
      mutableRow.setInt(0, value_1);
    } else {
      mutableRow.setNullAt(0);
    }
    return mutableRow;
  }
}
class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection {
  private Object[] references;
  private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1];

  public SpecificUnsafeProjection(Object[] references) {
    this.references = references;
    mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(3, 32);
  }

  public void initialize(int partitionIndex) {
  }

  // Scala.Function1 need this
  public java.lang.Object apply(java.lang.Object row) {
    return apply((InternalRow) row);
  }

  public UnsafeRow apply(InternalRow i) {
    mutableStateArray_0[0].reset();
    mutableStateArray_0[0].zeroOutNullBytes();
    boolean isNull_0 = i.isNullAt(0);
    UTF8String value_0 = isNull_0 ?
      null : (i.getUTF8String(0));
    if (isNull_0) {
      mutableStateArray_0[0].setNullAt(0);
    } else {
      mutableStateArray_0[0].write(0, value_0);
    }
    boolean isNull_1 = i.isNullAt(1);
    int value_1 = isNull_1 ?
      -1 : (i.getInt(1));
    if (isNull_1) {
      mutableStateArray_0[0].setNullAt(1);
    } else {
      mutableStateArray_0[0].write(1, value_1);
    }
    boolean isNull_2 = i.isNullAt(2);
    int value_2 = isNull_2 ?
      -1 : (i.getInt(2));
    if (isNull_2) {
      mutableStateArray_0[0].setNullAt(2);
    } else {
      mutableStateArray_0[0].write(2, value_2);
    }
    return (mutableStateArray_0[0].getRow());
  }
}
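The remaining dumps read ColumnarBatches instead of individual rows, i.e. the scan side supports the vectorized reader: batchscan_nextBatch_0 pulls a batch, caches its ColumnVectors, and the generated row loop indexes into them directly. One hypothetical way to get this shape, continuing the sketch above (the Parquet path is illustrative):

# Materialize the same data as Parquet so the scan uses the vectorized
# ColumnarBatch reader seen in the next dump.
df.write.mode("overwrite").parquet("/tmp/wordcount.parquet")
pq = spark.read.parquet("/tmp/wordcount.parquet")

# Columnar analogue of the first dump: scan fused with "count + 1".
pq.select("word", (col("count") + 1).alias("count")).collect()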
public Object generate(Object[] references) {
  return new GeneratedIteratorForCodegenStage1(references);
}

// codegenStageId=1
final class GeneratedIteratorForCodegenStage1 extends org.apache.spark.sql.execution.BufferedRowIterator {
  private Object[] references;
  private scala.collection.Iterator[] inputs;
  private long batchscan_scanTime_0;
  private int batchscan_batchIdx_0;
  private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] batchscan_mutableStateArray_3 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[2];
  private org.apache.spark.sql.vectorized.ColumnarBatch[] batchscan_mutableStateArray_1 = new org.apache.spark.sql.vectorized.ColumnarBatch[1];
  private scala.collection.Iterator[] batchscan_mutableStateArray_0 = new scala.collection.Iterator[1];
  private org.apache.spark.sql.vectorized.ColumnVector[] batchscan_mutableStateArray_2 = new org.apache.spark.sql.vectorized.ColumnVector[2];

  public GeneratedIteratorForCodegenStage1(Object[] references) {
    this.references = references;
  }

  public void init(int index, scala.collection.Iterator[] inputs) {
    partitionIndex = index;
    this.inputs = inputs;
    batchscan_mutableStateArray_0[0] = inputs[0];
    batchscan_mutableStateArray_3[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(2, 32);
    batchscan_mutableStateArray_3[1] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(2, 32);
  }

  protected void processNext() throws java.io.IOException {
    if (batchscan_mutableStateArray_1[0] == null) {
      batchscan_nextBatch_0();
    }
    while (batchscan_mutableStateArray_1[0] != null) {
      int batchscan_numRows_0 = batchscan_mutableStateArray_1[0].numRows();
      int batchscan_localEnd_0 = batchscan_numRows_0 - batchscan_batchIdx_0;
      for (int batchscan_localIdx_0 = 0; batchscan_localIdx_0 < batchscan_localEnd_0; batchscan_localIdx_0++) {
        int batchscan_rowIdx_0 = batchscan_batchIdx_0 + batchscan_localIdx_0;
        boolean batchscan_isNull_0 = batchscan_mutableStateArray_2[0].isNullAt(batchscan_rowIdx_0);
        UTF8String batchscan_value_0 = batchscan_isNull_0 ? null : (batchscan_mutableStateArray_2[0].getUTF8String(batchscan_rowIdx_0));
        boolean project_isNull_1 = true;
        int project_value_1 = -1;
        boolean batchscan_isNull_1 = batchscan_mutableStateArray_2[1].isNullAt(batchscan_rowIdx_0);
        int batchscan_value_1 = batchscan_isNull_1 ? -1 : (batchscan_mutableStateArray_2[1].getInt(batchscan_rowIdx_0));
        if (!batchscan_isNull_1) {
          project_isNull_1 = false; // resultCode could change nullability.
          project_value_1 = batchscan_value_1 + 1;
        }
        batchscan_mutableStateArray_3[1].reset();
        batchscan_mutableStateArray_3[1].zeroOutNullBytes();
        if (batchscan_isNull_0) {
          batchscan_mutableStateArray_3[1].setNullAt(0);
        } else {
          batchscan_mutableStateArray_3[1].write(0, batchscan_value_0);
        }
        if (project_isNull_1) {
          batchscan_mutableStateArray_3[1].setNullAt(1);
        } else {
          batchscan_mutableStateArray_3[1].write(1, project_value_1);
        }
        append((batchscan_mutableStateArray_3[1].getRow()));
        if (shouldStop()) { batchscan_batchIdx_0 = batchscan_rowIdx_0 + 1; return; }
      }
      batchscan_batchIdx_0 = batchscan_numRows_0;
      batchscan_mutableStateArray_1[0] = null;
      batchscan_nextBatch_0();
    }
    ((org.apache.spark.sql.execution.metric.SQLMetric) references[1] /* scanTime */).add(batchscan_scanTime_0 / (1000 * 1000));
    batchscan_scanTime_0 = 0;
  }

  private void batchscan_nextBatch_0() throws java.io.IOException {
    long getBatchStart = System.nanoTime();
    if (batchscan_mutableStateArray_0[0].hasNext()) {
      batchscan_mutableStateArray_1[0] = (org.apache.spark.sql.vectorized.ColumnarBatch) batchscan_mutableStateArray_0[0].next();
      ((org.apache.spark.sql.execution.metric.SQLMetric) references[0] /* numOutputRows */).add(batchscan_mutableStateArray_1[0].numRows());
      batchscan_batchIdx_0 = 0;
      batchscan_mutableStateArray_2[0] = (org.apache.spark.sql.vectorized.ColumnVector) batchscan_mutableStateArray_1[0].column(0);
      batchscan_mutableStateArray_2[1] = (org.apache.spark.sql.vectorized.ColumnVector) batchscan_mutableStateArray_1[0].column(1);
    }
    batchscan_scanTime_0 += System.nanoTime() - getBatchStart;
  }
}
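The next dump combines the columnar scan with the Python UDF: stage 1 unpacks batches into rows for the Python runner and stage 2 projects the UDF result, mirroring the row-based two-stage dump above. The assumed query, continuing the sketch:

# Hypothetical columnar-scan + Python UDF combination matching the
# two-stage dump that follows.
pq.select("word", plus_one(col("count")).alias("count")).collect()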
// codegenStageId=1
final class GeneratedIteratorForCodegenStage1 extends org.apache.spark.sql.execution.BufferedRowIterator {
  private Object[] references;
  private scala.collection.Iterator[] inputs;
  private long batchscan_scanTime_0;
  private int batchscan_batchIdx_0;
  private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] batchscan_mutableStateArray_3 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[2];
  private org.apache.spark.sql.vectorized.ColumnarBatch[] batchscan_mutableStateArray_1 = new org.apache.spark.sql.vectorized.ColumnarBatch[1];
  private scala.collection.Iterator[] batchscan_mutableStateArray_0 = new scala.collection.Iterator[1];
  private org.apache.spark.sql.vectorized.ColumnVector[] batchscan_mutableStateArray_2 = new org.apache.spark.sql.vectorized.ColumnVector[2];

  public GeneratedIteratorForCodegenStage1(Object[] references) {
    this.references = references;
  }

  public void init(int index, scala.collection.Iterator[] inputs) {
    partitionIndex = index;
    this.inputs = inputs;
    batchscan_mutableStateArray_0[0] = inputs[0];
    batchscan_mutableStateArray_3[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(2, 32);
    batchscan_mutableStateArray_3[1] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(2, 32);
  }

  protected void processNext() throws java.io.IOException {
    if (batchscan_mutableStateArray_1[0] == null) {
      batchscan_nextBatch_0();
    }
    while (batchscan_mutableStateArray_1[0] != null) {
      int batchscan_numRows_0 = batchscan_mutableStateArray_1[0].numRows();
      int batchscan_localEnd_0 = batchscan_numRows_0 - batchscan_batchIdx_0;
      for (int batchscan_localIdx_0 = 0; batchscan_localIdx_0 < batchscan_localEnd_0; batchscan_localIdx_0++) {
        int batchscan_rowIdx_0 = batchscan_batchIdx_0 + batchscan_localIdx_0;
        boolean batchscan_isNull_0 = batchscan_mutableStateArray_2[0].isNullAt(batchscan_rowIdx_0);
        UTF8String batchscan_value_0 = batchscan_isNull_0 ? null : (batchscan_mutableStateArray_2[0].getUTF8String(batchscan_rowIdx_0));
        boolean batchscan_isNull_1 = batchscan_mutableStateArray_2[1].isNullAt(batchscan_rowIdx_0);
        int batchscan_value_1 = batchscan_isNull_1 ? -1 : (batchscan_mutableStateArray_2[1].getInt(batchscan_rowIdx_0));
        batchscan_mutableStateArray_3[1].reset();
        batchscan_mutableStateArray_3[1].zeroOutNullBytes();
        if (batchscan_isNull_0) {
          batchscan_mutableStateArray_3[1].setNullAt(0);
        } else {
          batchscan_mutableStateArray_3[1].write(0, batchscan_value_0);
        }
        if (batchscan_isNull_1) {
          batchscan_mutableStateArray_3[1].setNullAt(1);
        } else {
          batchscan_mutableStateArray_3[1].write(1, batchscan_value_1);
        }
        append((batchscan_mutableStateArray_3[1].getRow()));
        if (shouldStop()) { batchscan_batchIdx_0 = batchscan_rowIdx_0 + 1; return; }
      }
      batchscan_batchIdx_0 = batchscan_numRows_0;
      batchscan_mutableStateArray_1[0] = null;
      batchscan_nextBatch_0();
    }
    ((org.apache.spark.sql.execution.metric.SQLMetric) references[1] /* scanTime */).add(batchscan_scanTime_0 / (1000 * 1000));
    batchscan_scanTime_0 = 0;
  }

  private void batchscan_nextBatch_0() throws java.io.IOException {
    long getBatchStart = System.nanoTime();
    if (batchscan_mutableStateArray_0[0].hasNext()) {
      batchscan_mutableStateArray_1[0] = (org.apache.spark.sql.vectorized.ColumnarBatch) batchscan_mutableStateArray_0[0].next();
      ((org.apache.spark.sql.execution.metric.SQLMetric) references[0] /* numOutputRows */).add(batchscan_mutableStateArray_1[0].numRows());
      batchscan_batchIdx_0 = 0;
      batchscan_mutableStateArray_2[0] = (org.apache.spark.sql.vectorized.ColumnVector) batchscan_mutableStateArray_1[0].column(0);
      batchscan_mutableStateArray_2[1] = (org.apache.spark.sql.vectorized.ColumnVector) batchscan_mutableStateArray_1[0].column(1);
    }
    batchscan_scanTime_0 += System.nanoTime() - getBatchStart;
  }
}

// codegenStageId=2
final class GeneratedIteratorForCodegenStage2 extends org.apache.spark.sql.execution.BufferedRowIterator {
  private Object[] references;
  private scala.collection.Iterator[] inputs;
  private scala.collection.Iterator inputadapter_input_0;
  private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] project_mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1];

  public GeneratedIteratorForCodegenStage2(Object[] references) {
    this.references = references;
  }

  public void init(int index, scala.collection.Iterator[] inputs) {
    partitionIndex = index;
    this.inputs = inputs;
    inputadapter_input_0 = inputs[0];
    project_mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(2, 32);
  }

  protected void processNext() throws java.io.IOException {
    while (inputadapter_input_0.hasNext()) {
      InternalRow inputadapter_row_0 = (InternalRow) inputadapter_input_0.next();
      boolean inputadapter_isNull_0 = inputadapter_row_0.isNullAt(0);
      UTF8String inputadapter_value_0 = inputadapter_isNull_0 ?
        null : (inputadapter_row_0.getUTF8String(0));
      boolean inputadapter_isNull_2 = inputadapter_row_0.isNullAt(2);
      int inputadapter_value_2 = inputadapter_isNull_2 ?
        -1 : (inputadapter_row_0.getInt(2));
      project_mutableStateArray_0[0].reset();
      project_mutableStateArray_0[0].zeroOutNullBytes();
      if (inputadapter_isNull_0) {
        project_mutableStateArray_0[0].setNullAt(0);
      } else {
        project_mutableStateArray_0[0].write(0, inputadapter_value_0);
      }
      if (inputadapter_isNull_2) {
        project_mutableStateArray_0[0].setNullAt(1);
      } else {
        project_mutableStateArray_0[0].write(1, inputadapter_value_2);
      }
      append((project_mutableStateArray_0[0].getRow()));
      if (shouldStop()) return;
    }
  }
}

class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection {
  private Object[] references;
  private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1];

  public SpecificUnsafeProjection(Object[] references) {
    this.references = references;
    mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(3, 32);
  }

  public void initialize(int partitionIndex) {
  }

  // Scala.Function1 need this
  public java.lang.Object apply(java.lang.Object row) {
    return apply((InternalRow) row);
  }

  public UnsafeRow apply(InternalRow i) {
    mutableStateArray_0[0].reset();
    mutableStateArray_0[0].zeroOutNullBytes();
    boolean isNull_0 = i.isNullAt(0);
    UTF8String value_0 = isNull_0 ?
      null : (i.getUTF8String(0));
    if (isNull_0) {
      mutableStateArray_0[0].setNullAt(0);
    } else {
      mutableStateArray_0[0].write(0, value_0);
    }
    boolean isNull_1 = i.isNullAt(1);
    int value_1 = isNull_1 ?
      -1 : (i.getInt(1));
    if (isNull_1) {
      mutableStateArray_0[0].setNullAt(1);
    } else {
      mutableStateArray_0[0].write(1, value_1);
    }
    boolean isNull_2 = i.isNullAt(2);
    int value_2 = isNull_2 ?
      -1 : (i.getInt(2));
    if (isNull_2) {
      mutableStateArray_0[0].setNullAt(2);
    } else {
      mutableStateArray_0[0].write(2, value_2);
    }
    return (mutableStateArray_0[0].getRow());
  }
}

class SpecificMutableProjection extends org.apache.spark.sql.catalyst.expressions.codegen.BaseMutableProjection {
  private Object[] references;
  private InternalRow mutableRow;
  private int value_1;
  private boolean isNull_1;

  public SpecificMutableProjection(Object[] references) {
    this.references = references;
    mutableRow = new org.apache.spark.sql.catalyst.expressions.GenericInternalRow(1);
  }

  public void initialize(int partitionIndex) {
  }

  public org.apache.spark.sql.catalyst.expressions.codegen.BaseMutableProjection target(InternalRow row) {
    mutableRow = row;
    return this;
  }

  /* Provide immutable access to the last projected row. */
  public InternalRow currentValue() {
    return (InternalRow) mutableRow;
  }

  public java.lang.Object apply(java.lang.Object _i) {
    InternalRow i = (InternalRow) _i;
    boolean isNull_0 = i.isNullAt(1);
    int value_0 = isNull_0 ?
      -1 : (i.getInt(1));
    isNull_1 = isNull_0;
    value_1 = value_0;
    // copy all the results into MutableRow
    if (!isNull_1) {
      mutableRow.setInt(0, value_1);
    } else {
      mutableRow.setNullAt(0);
    }
    return mutableRow;
  }
}
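The final dump repeats the same plan with the driver's DEBUG log lines left in place, which show which operator requests which projection: BatchEvalPythonExec builds the MutableProjection over count#1, and GenerateUnsafeProjection emits the three-column writer for (word, count, udf result). If root-level DEBUG is too noisy, individual loggers can be raised instead; the snippet below is a sketch using the usual py4j route to log4j, and the logger names are assumptions based on the class names in the log:

# Hypothetical: raise only the codegen and Python-exec loggers to DEBUG
# instead of calling setLogLevel("DEBUG") on the whole application.
log4j = spark._jvm.org.apache.log4j
for name in ("org.apache.spark.sql.catalyst.expressions.codegen",
             "org.apache.spark.sql.execution.python"):
    log4j.LogManager.getLogger(name).setLevel(log4j.Level.DEBUG)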
// codegenStageId=1
final class GeneratedIteratorForCodegenStage1 extends org.apache.spark.sql.execution.BufferedRowIterator {
  private Object[] references;
  private scala.collection.Iterator[] inputs;
  private long batchscan_scanTime_0;
  private int batchscan_batchIdx_0;
  private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] batchscan_mutableStateArray_3 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[2];
  private org.apache.spark.sql.vectorized.ColumnarBatch[] batchscan_mutableStateArray_1 = new org.apache.spark.sql.vectorized.ColumnarBatch[1];
  private scala.collection.Iterator[] batchscan_mutableStateArray_0 = new scala.collection.Iterator[1];
  private org.apache.spark.sql.vectorized.ColumnVector[] batchscan_mutableStateArray_2 = new org.apache.spark.sql.vectorized.ColumnVector[2];

  public GeneratedIteratorForCodegenStage1(Object[] references) {
    this.references = references;
  }

  public void init(int index, scala.collection.Iterator[] inputs) {
    partitionIndex = index;
    this.inputs = inputs;
    batchscan_mutableStateArray_0[0] = inputs[0];
    batchscan_mutableStateArray_3[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(2, 32);
    batchscan_mutableStateArray_3[1] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(2, 32);
  }

  protected void processNext() throws java.io.IOException {
    if (batchscan_mutableStateArray_1[0] == null) {
      batchscan_nextBatch_0();
    }
    while (batchscan_mutableStateArray_1[0] != null) {
      int batchscan_numRows_0 = batchscan_mutableStateArray_1[0].numRows();
      int batchscan_localEnd_0 = batchscan_numRows_0 - batchscan_batchIdx_0;
      for (int batchscan_localIdx_0 = 0; batchscan_localIdx_0 < batchscan_localEnd_0; batchscan_localIdx_0++) {
        int batchscan_rowIdx_0 = batchscan_batchIdx_0 + batchscan_localIdx_0;
        boolean batchscan_isNull_0 = batchscan_mutableStateArray_2[0].isNullAt(batchscan_rowIdx_0);
        UTF8String batchscan_value_0 = batchscan_isNull_0 ? null : (batchscan_mutableStateArray_2[0].getUTF8String(batchscan_rowIdx_0));
        boolean batchscan_isNull_1 = batchscan_mutableStateArray_2[1].isNullAt(batchscan_rowIdx_0);
        int batchscan_value_1 = batchscan_isNull_1 ? -1 : (batchscan_mutableStateArray_2[1].getInt(batchscan_rowIdx_0));
        batchscan_mutableStateArray_3[1].reset();
        batchscan_mutableStateArray_3[1].zeroOutNullBytes();
        if (batchscan_isNull_0) {
          batchscan_mutableStateArray_3[1].setNullAt(0);
        } else {
          batchscan_mutableStateArray_3[1].write(0, batchscan_value_0);
        }
        if (batchscan_isNull_1) {
          batchscan_mutableStateArray_3[1].setNullAt(1);
        } else {
          batchscan_mutableStateArray_3[1].write(1, batchscan_value_1);
        }
        append((batchscan_mutableStateArray_3[1].getRow()));
        if (shouldStop()) { batchscan_batchIdx_0 = batchscan_rowIdx_0 + 1; return; }
      }
      batchscan_batchIdx_0 = batchscan_numRows_0;
      batchscan_mutableStateArray_1[0] = null;
      batchscan_nextBatch_0();
    }
    ((org.apache.spark.sql.execution.metric.SQLMetric) references[1] /* scanTime */).add(batchscan_scanTime_0 / (1000 * 1000));
    batchscan_scanTime_0 = 0;
  }

  private void batchscan_nextBatch_0() throws java.io.IOException {
    long getBatchStart = System.nanoTime();
    if (batchscan_mutableStateArray_0[0].hasNext()) {
      batchscan_mutableStateArray_1[0] = (org.apache.spark.sql.vectorized.ColumnarBatch) batchscan_mutableStateArray_0[0].next();
      ((org.apache.spark.sql.execution.metric.SQLMetric) references[0] /* numOutputRows */).add(batchscan_mutableStateArray_1[0].numRows());
      batchscan_batchIdx_0 = 0;
      batchscan_mutableStateArray_2[0] = (org.apache.spark.sql.vectorized.ColumnVector) batchscan_mutableStateArray_1[0].column(0);
      batchscan_mutableStateArray_2[1] = (org.apache.spark.sql.vectorized.ColumnVector) batchscan_mutableStateArray_1[0].column(1);
    }
    batchscan_scanTime_0 += System.nanoTime() - getBatchStart;
  }
}

19/05/06 10:35:04 DEBUG BatchEvalPythonExec: Creating MutableProj: ArrayBuffer(count#1), inputSchema: List(word#0, count#1)
19/05/06 10:35:04 DEBUG GenerateMutableProjection: code for input[1, int, true]:

class SpecificMutableProjection extends org.apache.spark.sql.catalyst.expressions.codegen.BaseMutableProjection {
  private Object[] references;
  private InternalRow mutableRow;
  private int value_1;
  private boolean isNull_1;

  public SpecificMutableProjection(Object[] references) {
    this.references = references;
    mutableRow = new org.apache.spark.sql.catalyst.expressions.GenericInternalRow(1);
  }

  public void initialize(int partitionIndex) {
  }

  public org.apache.spark.sql.catalyst.expressions.codegen.BaseMutableProjection target(InternalRow row) {
    mutableRow = row;
    return this;
  }

  /* Provide immutable access to the last projected row. */
  public InternalRow currentValue() {
    return (InternalRow) mutableRow;
  }

  public java.lang.Object apply(java.lang.Object _i) {
    InternalRow i = (InternalRow) _i;
    boolean isNull_0 = i.isNullAt(1);
    int value_0 = isNull_0 ?
      -1 : (i.getInt(1));
    isNull_1 = isNull_0;
    value_1 = value_0;
    // copy all the results into MutableRow
    if (!isNull_1) {
      mutableRow.setInt(0, value_1);
    } else {
      mutableRow.setNullAt(0);
    }
    return mutableRow;
  }
}

19/05/06 10:35:05 DEBUG GenerateUnsafeProjection: code for input[0, string, true],input[1, int, true],input[2, int, true]:

class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection {
  private Object[] references;
  private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1];

  public SpecificUnsafeProjection(Object[] references) {
    this.references = references;
    mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(3, 32);
  }

  public void initialize(int partitionIndex) {
  }

  // Scala.Function1 need this
  public java.lang.Object apply(java.lang.Object row) {
    return apply((InternalRow) row);
  }

  public UnsafeRow apply(InternalRow i) {
    mutableStateArray_0[0].reset();
    mutableStateArray_0[0].zeroOutNullBytes();
    boolean isNull_0 = i.isNullAt(0);
    UTF8String value_0 = isNull_0 ?
      null : (i.getUTF8String(0));
    if (isNull_0) {
      mutableStateArray_0[0].setNullAt(0);
    } else {
      mutableStateArray_0[0].write(0, value_0);
    }
    boolean isNull_1 = i.isNullAt(1);
    int value_1 = isNull_1 ?
      -1 : (i.getInt(1));
    if (isNull_1) {
      mutableStateArray_0[0].setNullAt(1);
    } else {
      mutableStateArray_0[0].write(1, value_1);
    }
    boolean isNull_2 = i.isNullAt(2);
    int value_2 = isNull_2 ?
      -1 : (i.getInt(2));
    if (isNull_2) {
      mutableStateArray_0[0].setNullAt(2);
    } else {
      mutableStateArray_0[0].write(2, value_2);
    }
    return (mutableStateArray_0[0].getRow());
  }
}

// codegenStageId=2
final class GeneratedIteratorForCodegenStage2 extends org.apache.spark.sql.execution.BufferedRowIterator {
  private Object[] references;
  private scala.collection.Iterator[] inputs;
  private scala.collection.Iterator inputadapter_input_0;
  private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] project_mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1];

  public GeneratedIteratorForCodegenStage2(Object[] references) {
    this.references = references;
  }

  public void init(int index, scala.collection.Iterator[] inputs) {
    partitionIndex = index;
    this.inputs = inputs;
    inputadapter_input_0 = inputs[0];
    project_mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(2, 32);
  }

  protected void processNext() throws java.io.IOException {
    while (inputadapter_input_0.hasNext()) {
      InternalRow inputadapter_row_0 = (InternalRow) inputadapter_input_0.next();
      boolean inputadapter_isNull_0 = inputadapter_row_0.isNullAt(0);
      UTF8String inputadapter_value_0 = inputadapter_isNull_0 ?
        null : (inputadapter_row_0.getUTF8String(0));
      boolean inputadapter_isNull_2 = inputadapter_row_0.isNullAt(2);
      int inputadapter_value_2 = inputadapter_isNull_2 ?
        -1 : (inputadapter_row_0.getInt(2));
      project_mutableStateArray_0[0].reset();
      project_mutableStateArray_0[0].zeroOutNullBytes();
      if (inputadapter_isNull_0) {
        project_mutableStateArray_0[0].setNullAt(0);
      } else {
        project_mutableStateArray_0[0].write(0, inputadapter_value_0);
      }
      if (inputadapter_isNull_2) {
        project_mutableStateArray_0[0].setNullAt(1);
      } else {
        project_mutableStateArray_0[0].write(1, inputadapter_value_2);
      }
      append((project_mutableStateArray_0[0].getRow()));
      if (shouldStop()) return;
    }
  }
}