Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo in project hive by apache.
The class TestVectorArithmetic, method doRowArithmeticTest.
private void doRowArithmeticTest(TypeInfo typeInfo1, TypeInfo typeInfo2, List<String> columns,
    List<ExprNodeDesc> children, ExprNodeGenericFuncDesc exprDesc, Arithmetic arithmetic,
    Object[][] randomRows, ColumnScalarMode columnScalarMode, ObjectInspector rowInspector,
    TypeInfo outputTypeInfo, Object[] resultObjects) throws Exception {
  /*
  System.out.println(
      "*DEBUG* typeInfo1 " + typeInfo1.toString() +
      " typeInfo2 " + typeInfo2 +
      " arithmeticTestMode ROW_MODE" +
      " columnScalarMode " + columnScalarMode +
      " exprDesc " + exprDesc.toString());
  */
  HiveConf hiveConf = new HiveConf();
  ExprNodeEvaluator evaluator = ExprNodeEvaluatorFactory.get(exprDesc, hiveConf);
  evaluator.initialize(rowInspector);
  ObjectInspector objectInspector =
      TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(outputTypeInfo);
  final int rowCount = randomRows.length;
  for (int i = 0; i < rowCount; i++) {
    Object[] row = randomRows[i];
    Object result = evaluator.evaluate(row);
    Object copyResult = null;
    try {
      copyResult = ObjectInspectorUtils.copyToStandardObject(
          result, objectInspector, ObjectInspectorCopyOption.WRITABLE);
    } catch (Exception e) {
      Assert.fail(e.toString());
    }
    resultObjects[i] = copyResult;
  }
}
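The copy at the end of the loop is the crux: the evaluator may hand back a value backed by reused internal state, so the test snapshots it through the standard writable inspector. Below is a minimal, self-contained sketch of that pattern, assuming only hive-serde and hadoop-common on the classpath; the int type and the IntWritable value are illustrative stand-ins for the evaluator's output.

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.io.IntWritable;

public class WritableCopySketch {
  public static void main(String[] args) {
    // Standard writable inspector for the expression's output type (int here).
    ObjectInspector oi =
        TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(TypeInfoFactory.intTypeInfo);
    // Stand-in for evaluator.evaluate(row).
    Object result = new IntWritable(42);
    // WRITABLE keeps the copy in writable form, detached from any reused buffer.
    Object copy = ObjectInspectorUtils.copyToStandardObject(result, oi,
        ObjectInspectorCopyOption.WRITABLE);
    System.out.println(copy); // prints 42 from an independent IntWritable
  }
}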
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo in project hive by apache.
The class TestVectorDateAddSub, method extractResultObjects.
private void extractResultObjects(VectorizedRowBatch batch, int rowIndex,
    VectorExtractRow resultVectorExtractRow, Object[] scratchRow, TypeInfo targetTypeInfo,
    Object[] resultObjects) {
  ObjectInspector objectInspector =
      TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(targetTypeInfo);
  boolean selectedInUse = batch.selectedInUse;
  int[] selected = batch.selected;
  for (int logicalIndex = 0; logicalIndex < batch.size; logicalIndex++) {
    final int batchIndex = (selectedInUse ? selected[logicalIndex] : logicalIndex);
    resultVectorExtractRow.extractRow(batch, batchIndex, scratchRow);
    Object copyResult = ObjectInspectorUtils.copyToStandardObject(
        scratchRow[0], objectInspector, ObjectInspectorCopyOption.WRITABLE);
    resultObjects[rowIndex++] = copyResult;
  }
}
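What distinguishes this extractor from the row-mode tests is the index translation: when batch.selectedInUse is set, only the physical rows named in batch.selected (the first batch.size entries) are live. A minimal sketch of that mapping, assuming hive-exec on the classpath; the batch contents are made up for illustration.

import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;

public class SelectedIndexSketch {
  public static void main(String[] args) {
    VectorizedRowBatch batch = new VectorizedRowBatch(1);
    batch.size = 2;
    batch.selectedInUse = true;
    batch.selected[0] = 3; // pretend earlier filters kept only physical rows 3 and 7
    batch.selected[1] = 7;
    for (int logicalIndex = 0; logicalIndex < batch.size; logicalIndex++) {
      final int batchIndex = batch.selectedInUse ? batch.selected[logicalIndex] : logicalIndex;
      System.out.println("logical " + logicalIndex + " -> physical " + batchIndex);
    }
  }
}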
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo in project hive by apache.
The class TestVectorDateAddSub, method doRowDateAddSubTest.
private void doRowDateAddSubTest(TypeInfo dateTimeStringTypeInfo, TypeInfo integerTypeInfo,
    List<String> columns, List<ExprNodeDesc> children, boolean isAdd,
    ExprNodeGenericFuncDesc exprDesc, Object[][] randomRows, ColumnScalarMode columnScalarMode,
    ObjectInspector rowInspector, Object[] resultObjects) throws Exception {
  /*
  System.out.println(
      "*DEBUG* dateTimeStringTypeInfo " + dateTimeStringTypeInfo.toString() +
      " integerTypeInfo " + integerTypeInfo +
      " isAdd " + isAdd +
      " dateAddSubTestMode ROW_MODE" +
      " columnScalarMode " + columnScalarMode +
      " exprDesc " + exprDesc.toString());
  */
  HiveConf hiveConf = new HiveConf();
  ExprNodeEvaluator evaluator = ExprNodeEvaluatorFactory.get(exprDesc, hiveConf);
  evaluator.initialize(rowInspector);
  // DATE_ADD/DATE_SUB produce a DATE result, so the output inspector is fixed.
  ObjectInspector objectInspector =
      TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(TypeInfoFactory.dateTypeInfo);
  final int rowCount = randomRows.length;
  for (int i = 0; i < rowCount; i++) {
    Object[] row = randomRows[i];
    Object result = evaluator.evaluate(row);
    Object copyResult = ObjectInspectorUtils.copyToStandardObject(
        result, objectInspector, ObjectInspectorCopyOption.WRITABLE);
    resultObjects[i] = copyResult;
  }
}
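Unlike doRowArithmeticTest above, this method takes no output TypeInfo: DATE_ADD and DATE_SUB both return a DATE in current Hive, so the inspector is fixed to TypeInfoFactory.dateTypeInfo. A small sketch of what that call yields, assuming Hive 3.x serde APIs on the classpath:

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class DateInspectorSketch {
  public static void main(String[] args) {
    ObjectInspector oi =
        TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(TypeInfoFactory.dateTypeInfo);
    // The standard writable inspector for DATE is a primitive inspector.
    PrimitiveObjectInspector poi = (PrimitiveObjectInspector) oi;
    System.out.println(poi.getPrimitiveCategory()); // DATE
    System.out.println(poi.getTypeName());          // date
  }
}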
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo in project hive by apache.
The class VectorGroupByOperator, method initializeOp.
@Override
protected void initializeOp(Configuration hconf) throws HiveException {
  super.initializeOp(hconf);
  isLlap = LlapProxy.isDaemon();
  VectorExpression.doTransientInit(keyExpressions, hconf);
  List<ObjectInspector> objectInspectors = new ArrayList<ObjectInspector>();
  List<ExprNodeDesc> keysDesc = conf.getKeys();
  try {
    List<String> outputFieldNames = conf.getOutputColumnNames();
    final int outputCount = outputFieldNames.size();
    for (int i = 0; i < outputKeyLength; ++i) {
      VectorExpressionWriter vew =
          VectorExpressionWriterFactory.genVectorExpressionWritable(keysDesc.get(i));
      ObjectInspector oi = vew.getObjectInspector();
      objectInspectors.add(oi);
    }
    final int aggregateCount = vecAggrDescs.length;
    aggregators = new VectorAggregateExpression[aggregateCount];
    for (int i = 0; i < aggregateCount; ++i) {
      VectorAggregationDesc vecAggrDesc = vecAggrDescs[i];
      Class<? extends VectorAggregateExpression> vecAggrClass = vecAggrDesc.getVecAggrClass();
      VectorAggregateExpression vecAggrExpr = instantiateExpression(vecAggrDesc, hconf);
      VectorExpression.doTransientInit(vecAggrExpr.getInputExpression(), hconf);
      aggregators[i] = vecAggrExpr;
      ObjectInspector objInsp = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(
          vecAggrDesc.getOutputTypeInfo());
      Preconditions.checkState(objInsp != null);
      objectInspectors.add(objInsp);
    }
    keyWrappersBatch = VectorHashKeyWrapperBatch.compileKeyWrapperBatch(keyExpressions);
    aggregationBatchInfo = new VectorAggregationBufferBatch();
    aggregationBatchInfo.compileAggregationBatchInfo(aggregators);
    outputObjInspector =
        ObjectInspectorFactory.getStandardStructObjectInspector(outputFieldNames, objectInspectors);
    vrbCtx = new VectorizedRowBatchCtx(
        outputFieldNames.toArray(new String[0]),
        outputTypeInfos,
        outputDataTypePhysicalVariations,
        /* dataColumnNums */ null,
        /* partitionColumnCount */ 0,
        /* virtualColumnCount */ 0,
        /* neededVirtualColumns */ null,
        vOutContext.getScratchColumnTypeNames(),
        vOutContext.getScratchDataTypePhysicalVariations());
    outputBatch = vrbCtx.createVectorizedRowBatch();
  } catch (HiveException he) {
    throw he;
  } catch (Throwable e) {
    throw new HiveException(e);
  }
  forwardCache = new Object[outputKeyLength + aggregators.length];
  setupGroupingSets();
  switch (vectorDesc.getProcessingMode()) {
    case GLOBAL:
      Preconditions.checkState(outputKeyLength == 0);
      Preconditions.checkState(!groupingSetsPresent);
      processingMode = this.new ProcessingModeGlobalAggregate();
      break;
    case HASH:
      processingMode = this.new ProcessingModeHashAggregate();
      break;
    case MERGE_PARTIAL:
      Preconditions.checkState(!groupingSetsPresent);
      processingMode = this.new ProcessingModeReduceMergePartial();
      break;
    case STREAMING:
      processingMode = this.new ProcessingModeStreaming();
      break;
    default:
      throw new RuntimeException(
          "Unsupported vector GROUP BY processing mode " + vectorDesc.getProcessingMode().name());
  }
  processingMode.initialize(hconf);
}
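The inspector bookkeeping in the try block boils down to: one standard writable inspector per output column (keys via their expression writers, aggregates via their output TypeInfo), all folded into a single standard struct inspector. A minimal sketch of that assembly, assuming hive-serde on the classpath; the column names and types are illustrative.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class GroupByOutputInspectorSketch {
  public static void main(String[] args) {
    List<String> outputFieldNames = Arrays.asList("_col0", "_col1"); // key, then aggregate
    List<TypeInfo> outputTypeInfos =
        Arrays.asList(TypeInfoFactory.stringTypeInfo, TypeInfoFactory.longTypeInfo);
    List<ObjectInspector> objectInspectors = new ArrayList<>();
    for (TypeInfo typeInfo : outputTypeInfos) {
      // One standard writable inspector per output column.
      objectInspectors.add(
          TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(typeInfo));
    }
    StructObjectInspector outputObjInspector =
        ObjectInspectorFactory.getStandardStructObjectInspector(outputFieldNames, objectInspectors);
    System.out.println(outputObjInspector.getTypeName()); // struct<_col0:string,_col1:bigint>
  }
}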
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo in project hive by apache.
The class JsonSerDe, method initialize.
/**
 * Initialize the SerDe.
 *
 * @param conf system properties; can be null at compile time
 * @param tbl table properties
 * @param writeablePrimitivesDeserialize true if outputs are Hadoop Writable
 */
private void initialize(final Configuration conf, final Properties tbl,
    final boolean writeablePrimitivesDeserialize) {
  log.debug("Initializing JsonSerDe: {}", tbl.entrySet());
  final String nullEmpty = tbl.getProperty(NULL_EMPTY_LINES, "false");
  this.nullEmptyLines = Boolean.parseBoolean(nullEmpty);
  // Build the row's StructTypeInfo from the table's column names/types, then
  // derive the standard writable StructObjectInspector for it.
  this.rowTypeInfo =
      (StructTypeInfo) TypeInfoFactory.getStructTypeInfo(getColumnNames(), getColumnTypes());
  this.soi = (StructObjectInspector)
      TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(this.rowTypeInfo);
  final TimestampParser tsParser;
  final String parserFormats = tbl.getProperty(serdeConstants.TIMESTAMP_FORMATS);
  if (parserFormats != null) {
    tsParser = new TimestampParser(HiveStringUtils.splitAndUnEscape(parserFormats));
  } else {
    tsParser = new TimestampParser();
  }
  final String binaryEncodingStr = tbl.getProperty(BINARY_FORMAT, "base64");
  this.binaryEncoding = BinaryEncoding.valueOf(binaryEncodingStr.toUpperCase());
  this.jsonReader = new HiveJsonReader(this.soi, tsParser);
  this.jsonWriter = new HiveJsonWriter(this.binaryEncoding, getColumnNames());
  this.jsonReader.setBinaryEncoding(binaryEncoding);
  this.jsonReader.enable(HiveJsonReader.Feature.COL_INDEX_PARSING);
  if (writeablePrimitivesDeserialize) {
    this.jsonReader.enable(HiveJsonReader.Feature.PRIMITIVE_TO_WRITABLE);
  }
  final String ignoreExtras = tbl.getProperty(IGNORE_EXTRA, "true");
  if (Boolean.parseBoolean(ignoreExtras)) {
    this.jsonReader.enable(HiveJsonReader.Feature.IGNORE_UNKNOWN_FIELDS);
  }
  final String stringifyComplex = tbl.getProperty(STRINGIFY_COMPLEX, "true");
  if (Boolean.parseBoolean(stringifyComplex)) {
    this.jsonReader.enable(HiveJsonReader.Feature.STRINGIFY_COMPLEX_FIELDS);
  }
  log.debug("Initialized SerDe {}", this);
  log.debug("JSON Struct Reader: {}", jsonReader);
  log.debug("JSON Struct Writer: {}", jsonWriter);
}
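Here the method under discussion does the heavy lifting for the whole row at once: given the StructTypeInfo for the table schema, it returns a StructObjectInspector whose fields are all writable-backed. A minimal sketch of just that step, assuming hive-serde on the classpath; the two-column schema is made up.

import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class JsonRowInspectorSketch {
  public static void main(String[] args) {
    List<String> columnNames = Arrays.asList("id", "name");
    List<TypeInfo> columnTypes =
        Arrays.asList(TypeInfoFactory.intTypeInfo, TypeInfoFactory.stringTypeInfo);
    // Same two calls the SerDe makes: struct TypeInfo, then its writable inspector.
    StructTypeInfo rowTypeInfo =
        (StructTypeInfo) TypeInfoFactory.getStructTypeInfo(columnNames, columnTypes);
    StructObjectInspector soi = (StructObjectInspector)
        TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(rowTypeInfo);
    for (StructField f : soi.getAllStructFieldRefs()) {
      System.out.println(f.getFieldName() + " : " + f.getFieldObjectInspector().getTypeName());
    }
  }
}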