
Example 81 with TypeInfoFactory.stringTypeInfo

Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.stringTypeInfo in the Apache Hive project.

From the class TestVectorStringConcat, method doRowStringConcatTest:

private void doRowStringConcatTest(TypeInfo stringTypeInfo, TypeInfo integerTypeInfo, List<String> columns, List<ExprNodeDesc> children, Object[][] randomRows, ColumnScalarMode columnScalarMode, ObjectInspector rowInspector, GenericUDF genericUdf, Object[] resultObjects) throws Exception {
    /*
    System.out.println(
        "*DEBUG* stringTypeInfo " + stringTypeInfo.toString() +
        " integerTypeInfo " + integerTypeInfo +
        " stringConcatTestMode ROW_MODE" +
        " columnScalarMode " + columnScalarMode +
        " genericUdf " + genericUdf.toString());
    */
    ExprNodeGenericFuncDesc exprDesc = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, genericUdf, children);
    HiveConf hiveConf = new HiveConf();
    ExprNodeEvaluator evaluator = ExprNodeEvaluatorFactory.get(exprDesc, hiveConf);
    evaluator.initialize(rowInspector);
    ObjectInspector objectInspector = evaluator.getOutputOI();
    final int rowCount = randomRows.length;
    for (int i = 0; i < rowCount; i++) {
        Object[] row = randomRows[i];
        Object result = evaluator.evaluate(row);
        Object copyResult = ObjectInspectorUtils.copyToStandardObject(result, objectInspector, ObjectInspectorCopyOption.WRITABLE);
        resultObjects[i] = copyResult;
    }
}
Also used: ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector), ExprNodeEvaluator(org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator), ExprNodeGenericFuncDesc(org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc), HiveConf(org.apache.hadoop.hive.conf.HiveConf)
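The method above receives its columns, children, rowInspector, and genericUdf from the calling test. Purely as a hedged sketch (the column names "col0"/"col1", the "table" alias, and the choice of GenericUDFConcat are illustrative assumptions, not taken from TestVectorStringConcat), the inputs for a two-column string concatenation could be assembled like this:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFConcat;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

// Two STRING input columns, both typed with TypeInfoFactory.stringTypeInfo.
List<String> columns = Arrays.asList("col0", "col1");
List<ExprNodeDesc> children = new ArrayList<>();
children.add(new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "col0", "table", false));
children.add(new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "col1", "table", false));

// Row inspector describing a struct of two writable string columns.
ObjectInspector rowInspector = ObjectInspectorFactory.getStandardStructObjectInspector(
    columns,
    Arrays.<ObjectInspector>asList(
        PrimitiveObjectInspectorFactory.writableStringObjectInspector,
        PrimitiveObjectInspectorFactory.writableStringObjectInspector));

// CONCAT yields a string result, matching the stringTypeInfo used for exprDesc above.
GenericUDF genericUdf = new GenericUDFConcat();
// These values, plus generated random rows and a result array, would then be passed
// to doRowStringConcatTest(...) as shown above.

ExprNodeEvaluatorFactory then compiles the resulting exprDesc into a row-mode evaluator, which is exactly what the loop in doRowStringConcatTest exercises row by row.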

Example 82 with TypeInfoFactory.stringTypeInfo

Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.stringTypeInfo in the Apache Hive project.

From the class TestMapJoinOperator, method doTestString2:

public boolean doTestString2(long seed, int hiveConfVariation, VectorMapJoinVariation vectorMapJoinVariation, MapJoinPlanVariation mapJoinPlanVariation) throws Exception {
    int rowCount = 10;
    HiveConf hiveConf = new HiveConf();
    if (!addNonLongHiveConfVariation(hiveConfVariation, hiveConf)) {
        return true;
    }
    TypeInfo[] bigTableTypeInfos = null;
    int[] bigTableKeyColumnNums = null;
    TypeInfo[] smallTableValueTypeInfos = null;
    int[] smallTableRetainKeyColumnNums = null;
    SmallTableGenerationParameters smallTableGenerationParameters = new SmallTableGenerationParameters();
    MapJoinTestDescription testDesc = null;
    MapJoinTestData testData = null;
    // One STRING key column; Small Table value: NONE (tests INNER_BIG_ONLY, LEFT_SEMI).
    bigTableTypeInfos = new TypeInfo[] { TypeInfoFactory.stringTypeInfo };
    bigTableKeyColumnNums = new int[] { 0 };
    smallTableRetainKeyColumnNums = new int[] { 0 };
    smallTableValueTypeInfos = new TypeInfo[] {};
    smallTableGenerationParameters = new SmallTableGenerationParameters();
    // ----------------------------------------------------------------------------------------------
    testDesc = new MapJoinTestDescription(hiveConf, vectorMapJoinVariation, bigTableTypeInfos, bigTableKeyColumnNums, smallTableValueTypeInfos, smallTableRetainKeyColumnNums, smallTableGenerationParameters, mapJoinPlanVariation);
    if (!goodTestVariation(testDesc)) {
        return false;
    }
    // Prepare data.  Good for ANY implementation variation.
    testData = new MapJoinTestData(rowCount, testDesc, seed);
    executeTest(testDesc, testData, "testString2");
    return false;
}
Also used: SmallTableGenerationParameters(org.apache.hadoop.hive.ql.exec.vector.mapjoin.MapJoinTestDescription.SmallTableGenerationParameters), HiveConf(org.apache.hadoop.hive.conf.HiveConf), DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo), TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo)
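Here the small table carries no value columns (smallTableValueTypeInfos is empty), so only key matching on the STRING column is exercised. As a hedged, hypothetical variant (not from the Hive sources; it reuses the same local variables and the MapJoinTestDescription constructor shown above, and the test name suffix is made up), a STRING small-table value column could be added like this:

// Hypothetical follow-on variation: one STRING big-table key column plus one STRING
// small-table value column, so joined value output is exercised as well.
bigTableTypeInfos = new TypeInfo[] { TypeInfoFactory.stringTypeInfo };
bigTableKeyColumnNums = new int[] { 0 };
smallTableRetainKeyColumnNums = new int[] { 0 };
smallTableValueTypeInfos = new TypeInfo[] { TypeInfoFactory.stringTypeInfo };
smallTableGenerationParameters = new SmallTableGenerationParameters();
testDesc = new MapJoinTestDescription(hiveConf, vectorMapJoinVariation,
    bigTableTypeInfos, bigTableKeyColumnNums, smallTableValueTypeInfos,
    smallTableRetainKeyColumnNums, smallTableGenerationParameters, mapJoinPlanVariation);
if (goodTestVariation(testDesc)) {
    testData = new MapJoinTestData(rowCount, testDesc, seed);
    executeTest(testDesc, testData, "testString2_withStringValue");
}

Only smallTableValueTypeInfos changes relative to the call above; everything else mirrors the original setup.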

Example 83 with TypeInfoFactory.stringTypeInfo

Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.stringTypeInfo in the Apache Hive project.

From the class TestGenericUDFDateFormat, method testTimestampPriorTo1900:

@Test
public void testTimestampPriorTo1900() throws HiveException {
    GenericUDFDateFormat udf = new GenericUDFDateFormat();
    ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
    Text fmtText = new Text("yyyy-MM-dd HH:mm:ss.SSS z");
    ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(TypeInfoFactory.stringTypeInfo, fmtText);
    ObjectInspector[] arguments = { valueOI0, valueOI1 };
    udf.initialize(arguments);
    runAndVerifyStr("1400-01-14 01:01:10.123", fmtText, "1400-01-14 01:01:10.123 PST", udf);
    runAndVerifyStr("1800-01-14 01:01:10.123", fmtText, "1800-01-14 01:01:10.123 PST", udf);
}
Also used: ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector), Text(org.apache.hadoop.io.Text), Test(org.junit.Test)
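runAndVerifyStr is a private helper of TestGenericUDFDateFormat and is not shown on this page. As a rough sketch of what an equivalent direct call looks like (the use of GenericUDF.DeferredJavaObject is an assumption about how such helpers are commonly written, and the expected string is taken from the assertion above, which presumes the test's PST time zone):

import static org.junit.Assert.assertEquals;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.io.Text;

// Evaluate date_format('1400-01-14 01:01:10.123', 'yyyy-MM-dd HH:mm:ss.SSS z') directly.
DeferredObject[] evalArgs = {
    new DeferredJavaObject(new Text("1400-01-14 01:01:10.123")),
    new DeferredJavaObject(fmtText)
};
Text formatted = (Text) udf.evaluate(evalArgs);
assertEquals("1400-01-14 01:01:10.123 PST", formatted.toString());

The helper presumably wraps this pattern so each input string and expected output in the tests below can be checked with a single call.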

Example 84 with TypeInfoFactory.stringTypeInfo

Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.stringTypeInfo in the Apache Hive project.

From the class TestGenericUDFDateFormat, method testJulianDates:

@Test
public void testJulianDates() throws HiveException {
    GenericUDFDateFormat udf = new GenericUDFDateFormat();
    ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
    Text fmtText = new Text("dd---MM--yyyy");
    ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(TypeInfoFactory.stringTypeInfo, fmtText);
    ObjectInspector[] arguments = { valueOI0, valueOI1 };
    udf.initialize(arguments);
    runAndVerifyStr("1001-01-05", fmtText, "05---01--1001", udf);
}
Also used: ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector), Text(org.apache.hadoop.io.Text), Test(org.junit.Test)

Example 85 with TypeInfoFactory.stringTypeInfo

Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.stringTypeInfo in the Apache Hive project.

From the class TestGenericUDFDateFormat, method testDateFormatStr:

@Test
public void testDateFormatStr() throws HiveException {
    GenericUDFDateFormat udf = new GenericUDFDateFormat();
    ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
    Text fmtText = new Text("EEEE");
    ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(TypeInfoFactory.stringTypeInfo, fmtText);
    ObjectInspector[] arguments = { valueOI0, valueOI1 };
    udf.initialize(arguments);
    // date str
    runAndVerifyStr("2015-04-05", fmtText, "Sunday", udf);
    runAndVerifyStr("2015-04-06", fmtText, "Monday", udf);
    runAndVerifyStr("2015-04-07", fmtText, "Tuesday", udf);
    runAndVerifyStr("2015-04-08", fmtText, "Wednesday", udf);
    runAndVerifyStr("2015-04-09", fmtText, "Thursday", udf);
    runAndVerifyStr("2015-04-10", fmtText, "Friday", udf);
    runAndVerifyStr("2015-04-11", fmtText, "Saturday", udf);
    runAndVerifyStr("2015-04-12", fmtText, "Sunday", udf);
    // ts str
    runAndVerifyStr("2015-04-05 10:30:45", fmtText, "Sunday", udf);
    runAndVerifyStr("2015-04-06 10:30:45", fmtText, "Monday", udf);
    runAndVerifyStr("2015-04-07 10:30:45", fmtText, "Tuesday", udf);
    runAndVerifyStr("2015-04-08 10:30:45", fmtText, "Wednesday", udf);
    runAndVerifyStr("2015-04-09 10:30", fmtText, "Thursday", udf);
    runAndVerifyStr("2015-04-10 10:30:45.123", fmtText, "Friday", udf);
    runAndVerifyStr("2015-04-11T10:30:45", fmtText, "Saturday", udf);
    runAndVerifyStr("2015-04-12 10", fmtText, "Sunday", udf);
}
Also used: ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector), Text(org.apache.hadoop.io.Text), Test(org.junit.Test)

Aggregations

Test (org.junit.Test): 65
TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo): 44
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 36
Text (org.apache.hadoop.io.Text): 34
ArrayList (java.util.ArrayList): 19
HiveConf (org.apache.hadoop.hive.conf.HiveConf): 19
ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc): 17
ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc): 16
ExprNodeGenericFuncDesc (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc): 16
VectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch): 14
ExprNodeConstantDesc (org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc): 14
SmallTableGenerationParameters (org.apache.hadoop.hive.ql.exec.vector.mapjoin.MapJoinTestDescription.SmallTableGenerationParameters): 13
DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo): 13
PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory): 12
PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo): 12
Properties (java.util.Properties): 11
BytesColumnVector (org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector): 11
LongColumnVector (org.apache.hadoop.hive.ql.exec.vector.LongColumnVector): 11
TestVectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.TestVectorizedRowBatch): 11
Configuration (org.apache.hadoop.conf.Configuration): 10