Use of org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector in project hive by apache.
The class VectorUDAFAvgTimestamp, method aggregateInputSelection.
@Override
public void aggregateInputSelection(VectorAggregationBufferRow[] aggregationBufferSets,
    int bufferIndex, VectorizedRowBatch batch) throws HiveException {

  int batchSize = batch.size;
  if (batchSize == 0) {
    return;
  }

  inputExpression.evaluate(batch);

  TimestampColumnVector inputColVector =
      (TimestampColumnVector) batch.cols[this.inputExpression.getOutputColumn()];

  // Dispatch once per batch on the noNulls / isRepeating / selectedInUse flags
  // so each specialized iterate* helper runs a branch-free inner loop.
  if (inputColVector.noNulls) {
    if (inputColVector.isRepeating) {
      iterateNoNullsRepeatingWithAggregationSelection(
          aggregationBufferSets, bufferIndex, inputColVector.getDouble(0), batchSize);
    } else {
      if (batch.selectedInUse) {
        iterateNoNullsSelectionWithAggregationSelection(
            aggregationBufferSets, bufferIndex, inputColVector, batch.selected, batchSize);
      } else {
        iterateNoNullsWithAggregationSelection(
            aggregationBufferSets, bufferIndex, inputColVector, batchSize);
      }
    }
  } else {
    if (inputColVector.isRepeating) {
      if (batch.selectedInUse) {
        iterateHasNullsRepeatingSelectionWithAggregationSelection(
            aggregationBufferSets, bufferIndex, inputColVector.getDouble(0),
            batchSize, batch.selected, inputColVector.isNull);
      } else {
        iterateHasNullsRepeatingWithAggregationSelection(
            aggregationBufferSets, bufferIndex, inputColVector.getDouble(0),
            batchSize, inputColVector.isNull);
      }
    } else {
      if (batch.selectedInUse) {
        iterateHasNullsSelectionWithAggregationSelection(
            aggregationBufferSets, bufferIndex, inputColVector,
            batchSize, batch.selected, inputColVector.isNull);
      } else {
        iterateHasNullsWithAggregationSelection(
            aggregationBufferSets, bufferIndex, inputColVector,
            batchSize, inputColVector.isNull);
      }
    }
  }
}
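The three flags consulted above (noNulls, isRepeating, selectedInUse) fully describe a batch, and the specialized iterate* methods exist so each combination gets its own tight loop. As a minimal, self-contained sketch of what any one branch reduces to (illustrative code, not Hive's iterate* bodies, which are not shown here), the following averages the non-null timestamps of a batch as doubles while honoring all three flags:

import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;

public final class TimestampAvgSketch {

  public static double average(VectorizedRowBatch batch, int colNum) {
    TimestampColumnVector col = (TimestampColumnVector) batch.cols[colNum];
    int n = batch.size;
    double sum = 0;
    long count = 0;

    if (col.isRepeating) {
      // One logical value in slot 0 stands for every row in the batch.
      if (col.noNulls || !col.isNull[0]) {
        sum = col.getDouble(0) * n;
        count = n;
      }
    } else {
      for (int j = 0; j < n; j++) {
        // With selectedInUse, batch.selected holds the live row indices.
        int i = batch.selectedInUse ? batch.selected[j] : j;
        if (col.noNulls || !col.isNull[i]) {
          sum += col.getDouble(i);
          count++;
        }
      }
    }
    return count == 0 ? Double.NaN : sum / count;
  }
}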
Use of org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector in project hive by apache.
The class VectorUDAFStdPopTimestamp, method aggregateInputSelection.
@Override
public void aggregateInputSelection(VectorAggregationBufferRow[] aggregationBufferSets,
    int aggregateIndex, VectorizedRowBatch batch) throws HiveException {

  inputExpression.evaluate(batch);

  TimestampColumnVector inputColVector =
      (TimestampColumnVector) batch.cols[this.inputExpression.getOutputColumn()];

  int batchSize = batch.size;
  if (batchSize == 0) {
    return;
  }

  if (inputColVector.isRepeating) {
    if (inputColVector.noNulls || !inputColVector.isNull[0]) {
      iterateRepeatingNoNullsWithAggregationSelection(
          aggregationBufferSets, aggregateIndex, inputColVector.getDouble(0), batchSize);
    }
  } else if (!batch.selectedInUse && inputColVector.noNulls) {
    iterateNoSelectionNoNullsWithAggregationSelection(
        aggregationBufferSets, aggregateIndex, inputColVector, batchSize);
  } else if (!batch.selectedInUse) {
    iterateNoSelectionHasNullsWithAggregationSelection(
        aggregationBufferSets, aggregateIndex, inputColVector, batchSize, inputColVector.isNull);
  } else if (inputColVector.noNulls) {
    iterateSelectionNoNullsWithAggregationSelection(
        aggregationBufferSets, aggregateIndex, inputColVector, batchSize, batch.selected);
  } else {
    iterateSelectionHasNullsWithAggregationSelection(
        aggregationBufferSets, aggregateIndex, inputColVector, batchSize,
        inputColVector.isNull, batch.selected);
  }
}
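One detail worth noting in the repeating branch above: the guard is noNulls || !isNull[0], not noNulls alone, because a batch may allow nulls (noNulls == false) while its single repeated value is still non-null. A small demonstration (the class name RepeatingGuardDemo is illustrative, not from Hive):

import java.sql.Timestamp;
import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;

public final class RepeatingGuardDemo {
  public static void main(String[] args) {
    TimestampColumnVector col = new TimestampColumnVector(1024);
    col.set(0, Timestamp.valueOf("2024-01-01 00:00:00"));
    col.isRepeating = true;
    col.noNulls = false;   // batch-level hint: nulls are possible...
    col.isNull[0] = false; // ...but the single repeated value is not null

    // Mirrors the guard in the repeating branch: the value is aggregated
    // batchSize times only if it is actually non-null.
    boolean aggregate = col.noNulls || !col.isNull[0];
    System.out.println("aggregate repeated value? " + aggregate); // true
  }
}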
Use of org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector in project hive by apache.
The class VectorUDAFStdPopTimestamp, method aggregateInput.
@Override
public void aggregateInput(AggregationBuffer agg, VectorizedRowBatch batch) throws HiveException {

  inputExpression.evaluate(batch);

  TimestampColumnVector inputColVector =
      (TimestampColumnVector) batch.cols[this.inputExpression.getOutputColumn()];

  int batchSize = batch.size;
  if (batchSize == 0) {
    return;
  }

  Aggregation myagg = (Aggregation) agg;

  if (inputColVector.isRepeating) {
    // As in aggregateInputSelection above: a repeating value counts for the
    // whole batch whenever it is actually non-null.
    if (inputColVector.noNulls || !inputColVector.isNull[0]) {
      iterateRepeatingNoNulls(myagg, inputColVector.getDouble(0), batchSize);
    }
  } else if (!batch.selectedInUse && inputColVector.noNulls) {
    iterateNoSelectionNoNulls(myagg, inputColVector, batchSize);
  } else if (!batch.selectedInUse) {
    iterateNoSelectionHasNulls(myagg, inputColVector, batchSize, inputColVector.isNull);
  } else if (inputColVector.noNulls) {
    iterateSelectionNoNulls(myagg, inputColVector, batchSize, batch.selected);
  } else {
    iterateSelectionHasNulls(myagg, inputColVector, batchSize, inputColVector.isNull, batch.selected);
  }
}
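The iterate* helpers and the Aggregation buffer are not shown in this snippet. Assuming the streaming sum/count/variance update used by Hive's GenericUDAFVariance (an assumption; the actual fields may differ), the no-selection, no-nulls case boils down to something like the sketch below. VarianceAccumulator, add, and addAll are illustrative names, not Hive's:

import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;

final class VarianceAccumulator {
  double sum;
  long count;
  double variance; // running sum of squared deviations from the mean

  void add(double value) {
    count++;
    sum += value;
    if (count > 1) {
      // Incremental update: adds (n*x_n - S_n)^2 / (n*(n-1)) to the
      // accumulated squared deviations, with S_n including x_n.
      double t = count * value - sum;
      variance += (t * t) / ((double) count * (count - 1));
    }
  }

  void addAll(TimestampColumnVector col, int batchSize) {
    // No selection vector and no nulls: every slot 0..batchSize-1 is live.
    for (int i = 0; i < batchSize; i++) {
      add(col.getDouble(i));
    }
  }
}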
Use of org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector in project hive by apache.
The class VectorUDAFStdSampTimestamp, method aggregateInput.
@Override
public void aggregateInput(AggregationBuffer agg, VectorizedRowBatch batch) throws HiveException {

  inputExpression.evaluate(batch);

  TimestampColumnVector inputColVector =
      (TimestampColumnVector) batch.cols[this.inputExpression.getOutputColumn()];

  int batchSize = batch.size;
  if (batchSize == 0) {
    return;
  }

  Aggregation myagg = (Aggregation) agg;

  if (inputColVector.isRepeating) {
    if (inputColVector.noNulls || !inputColVector.isNull[0]) {
      iterateRepeatingNoNulls(myagg, inputColVector.getDouble(0), batchSize);
    }
  } else if (!batch.selectedInUse && inputColVector.noNulls) {
    iterateNoSelectionNoNulls(myagg, inputColVector, batchSize);
  } else if (!batch.selectedInUse) {
    iterateNoSelectionHasNulls(myagg, inputColVector, batchSize, inputColVector.isNull);
  } else if (inputColVector.noNulls) {
    iterateSelectionNoNulls(myagg, inputColVector, batchSize, batch.selected);
  } else {
    iterateSelectionHasNulls(myagg, inputColVector, batchSize, inputColVector.isNull, batch.selected);
  }
}
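This method is identical to the VectorUDAFStdPopTimestamp version above: the population and sample variants share their batch iteration and differ only in how the final result is derived from the accumulated state. Sketched against the illustrative VarianceAccumulator from the previous example (again an assumption about the internals, not Hive's code):

// Finalization sketch only; the batch iteration shown above is shared.
static double stdPop(VarianceAccumulator acc) {
  return Math.sqrt(acc.variance / acc.count);       // population: divide by n
}

static double stdSamp(VarianceAccumulator acc) {
  return Math.sqrt(acc.variance / (acc.count - 1)); // sample: divide by n - 1
}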
Use of org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector in project hive by apache.
The class RecordReaderImpl, method copyTimestampColumn.
void copyTimestampColumn(ColumnVector destination, ColumnVector source, int sourceOffset, int length) {
  TimestampColumnVector castedSource = (TimestampColumnVector) source;
  TimestampColumnVector castedDestination = (TimestampColumnVector) destination;
  castedDestination.isRepeating = castedSource.isRepeating;
  castedDestination.noNulls = castedSource.noNulls;
  if (source.isRepeating) {
    // A repeating vector is fully defined by slot 0.
    castedDestination.isNull[0] = castedSource.isNull[0];
    castedDestination.time[0] = castedSource.time[0];
    castedDestination.nanos[0] = castedSource.nanos[0];
  } else {
    if (!castedSource.noNulls) {
      // noNulls was already copied above; here the per-row null flags must
      // be carried over alongside the values.
      for (int r = 0; r < length; ++r) {
        castedDestination.isNull[r] = castedSource.isNull[sourceOffset + r];
        castedDestination.time[r] = castedSource.time[sourceOffset + r];
        castedDestination.nanos[r] = castedSource.nanos[sourceOffset + r];
      }
    } else {
      for (int r = 0; r < length; ++r) {
        castedDestination.time[r] = castedSource.time[sourceOffset + r];
        castedDestination.nanos[r] = castedSource.nanos[sourceOffset + r];
      }
    }
  }
}
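A usage sketch for the repeating fast path above. Because a repeating vector is defined entirely by slot 0, the copy never touches rows 1..n-1; and since copyTimestampColumn has package-private access in RecordReaderImpl, this sketch inlines the same field copies rather than calling it (CopyDemo is an illustrative name):

import java.sql.Timestamp;
import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;

public final class CopyDemo {
  public static void main(String[] args) {
    TimestampColumnVector src = new TimestampColumnVector(1024);
    TimestampColumnVector dst = new TimestampColumnVector(1024);

    src.isRepeating = true;
    src.noNulls = true;
    src.set(0, Timestamp.valueOf("2024-06-01 12:00:00"));

    // Equivalent of the repeating branch above: flags plus slot 0 only.
    dst.isRepeating = src.isRepeating;
    dst.noNulls = src.noNulls;
    dst.isNull[0] = src.isNull[0];
    dst.time[0] = src.time[0];
    dst.nanos[0] = src.nanos[0];

    System.out.println(dst.asScratchTimestamp(0)); // 2024-06-01 12:00:00.0
  }
}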