
Example 41 with LongObjectInspector

Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector in project hive by apache.

From the class GenericUDFFormatNumber, method evaluate:

@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
    Object arg0;
    Object arg1;
    if ((arg0 = arguments[0].get()) == null || (arg1 = arguments[1].get()) == null) {
        return null;
    }
    if (!dType.equals(PrimitiveCategory.STRING)) {
        int dValue = ((IntObjectInspector) argumentOIs[1]).get(arg1);
        if (dValue < 0) {
            throw new HiveException("Argument 2 of function FORMAT_NUMBER must be >= 0, but \"" + dValue + "\" was found");
        }
        if (dValue != lastDValue) {
            // construct a new DecimalFormat only when dValue changes
            pattern.delete(0, pattern.length());
            pattern.append("#,###,###,###,###,###,##0");
            // decimal place
            if (dValue > 0) {
                pattern.append(".");
                for (int i = 0; i < dValue; i++) {
                    pattern.append("0");
                }
            }
            DecimalFormat dFormat = new DecimalFormat(pattern.toString());
            lastDValue = dValue;
            numberFormat.applyPattern(dFormat.toPattern());
        }
    }
    double xDoubleValue = 0.0;
    float xFloatValue = 0.0f;
    HiveDecimal xDecimalValue = null;
    int xIntValue = 0;
    long xLongValue = 0L;
    PrimitiveObjectInspector xObjectInspector = (PrimitiveObjectInspector) argumentOIs[0];
    switch(xObjectInspector.getPrimitiveCategory()) {
        case VOID:
        case DOUBLE:
            xDoubleValue = ((DoubleObjectInspector) argumentOIs[0]).get(arg0);
            resultText.set(numberFormat.format(xDoubleValue));
            break;
        case FLOAT:
            xFloatValue = ((FloatObjectInspector) argumentOIs[0]).get(arg0);
            resultText.set(numberFormat.format(xFloatValue));
            break;
        case DECIMAL:
            xDecimalValue = ((HiveDecimalObjectInspector) argumentOIs[0]).getPrimitiveJavaObject(arg0);
            resultText.set(numberFormat.format(xDecimalValue.bigDecimalValue()));
            break;
        case BYTE:
        case SHORT:
        case INT:
            xIntValue = ((IntObjectInspector) argumentOIs[0]).get(arg0);
            resultText.set(numberFormat.format(xIntValue));
            break;
        case LONG:
            xLongValue = ((LongObjectInspector) argumentOIs[0]).get(arg0);
            resultText.set(numberFormat.format(xLongValue));
            break;
        default:
            throw new HiveException("Argument 1 of function FORMAT_NUMBER must be " + serdeConstants.TINYINT_TYPE_NAME + "\"" + " or \"" + serdeConstants.SMALLINT_TYPE_NAME + "\"" + " or \"" + serdeConstants.INT_TYPE_NAME + "\"" + " or \"" + serdeConstants.BIGINT_TYPE_NAME + "\"" + " or \"" + serdeConstants.DOUBLE_TYPE_NAME + "\"" + " or \"" + serdeConstants.FLOAT_TYPE_NAME + "\"" + " or \"" + serdeConstants.DECIMAL_TYPE_NAME + "\", but \"" + argumentOIs[0].getTypeName() + "\" was found.");
    }
    return resultText;
}
Also used : IntObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) DecimalFormat(java.text.DecimalFormat) HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector)
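
The part worth isolating here is how the UDF rebuilds its format pattern only when the second argument changes. Below is a minimal, standalone sketch of that pattern-building step using plain java.text.DecimalFormat; the class and method names (FormatNumberPatternSketch, buildFormat) are made up for illustration and are not part of Hive.

import java.text.DecimalFormat;

public class FormatNumberPatternSketch {
    // Rebuilds the grouping pattern used above: "#,###,###,###,###,###,##0"
    // plus one '0' per requested decimal place.
    static DecimalFormat buildFormat(int decimalPlaces) {
        if (decimalPlaces < 0) {
            throw new IllegalArgumentException("decimal places must be >= 0");
        }
        StringBuilder pattern = new StringBuilder("#,###,###,###,###,###,##0");
        if (decimalPlaces > 0) {
            pattern.append('.');
            for (int i = 0; i < decimalPlaces; i++) {
                pattern.append('0');
            }
        }
        return new DecimalFormat(pattern.toString());
    }

    public static void main(String[] args) {
        System.out.println(buildFormat(2).format(1234567.891)); // 1,234,567.89
        System.out.println(buildFormat(0).format(1234567.891)); // 1,234,568
    }
}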

Example 42 with LongObjectInspector

Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector in project hive by apache.

From the class TypedBytesSerDe, method serializeField:

private void serializeField(Object o, ObjectInspector oi, Object reuse) throws IOException {
    switch(oi.getCategory()) {
        case PRIMITIVE:
            {
                PrimitiveObjectInspector poi = (PrimitiveObjectInspector) oi;
                switch(poi.getPrimitiveCategory()) {
                    case VOID:
                        {
                            return;
                        }
                    case BOOLEAN:
                        {
                            BooleanObjectInspector boi = (BooleanObjectInspector) poi;
                            BooleanWritable r = reuse == null ? new BooleanWritable() : (BooleanWritable) reuse;
                            r.set(boi.get(o));
                            tbOut.write(r);
                            return;
                        }
                    case BYTE:
                        {
                            ByteObjectInspector boi = (ByteObjectInspector) poi;
                            ByteWritable r = reuse == null ? new ByteWritable() : (ByteWritable) reuse;
                            r.set(boi.get(o));
                            tbOut.write(r);
                            return;
                        }
                    case SHORT:
                        {
                            ShortObjectInspector spoi = (ShortObjectInspector) poi;
                            ShortWritable r = reuse == null ? new ShortWritable() : (ShortWritable) reuse;
                            r.set(spoi.get(o));
                            tbOut.write(r);
                            return;
                        }
                    case INT:
                        {
                            IntObjectInspector ioi = (IntObjectInspector) poi;
                            IntWritable r = reuse == null ? new IntWritable() : (IntWritable) reuse;
                            r.set(ioi.get(o));
                            tbOut.write(r);
                            return;
                        }
                    case LONG:
                        {
                            LongObjectInspector loi = (LongObjectInspector) poi;
                            LongWritable r = reuse == null ? new LongWritable() : (LongWritable) reuse;
                            r.set(loi.get(o));
                            tbOut.write(r);
                            return;
                        }
                    case FLOAT:
                        {
                            FloatObjectInspector foi = (FloatObjectInspector) poi;
                            FloatWritable r = reuse == null ? new FloatWritable() : (FloatWritable) reuse;
                            r.set(foi.get(o));
                            tbOut.write(r);
                            return;
                        }
                    case DOUBLE:
                        {
                            DoubleObjectInspector doi = (DoubleObjectInspector) poi;
                            DoubleWritable r = reuse == null ? new DoubleWritable() : (DoubleWritable) reuse;
                            r.set(doi.get(o));
                            tbOut.write(r);
                            return;
                        }
                    case STRING:
                        {
                            StringObjectInspector soi = (StringObjectInspector) poi;
                            Text t = soi.getPrimitiveWritableObject(o);
                            tbOut.write(t);
                            return;
                        }
                    default:
                        {
                            throw new RuntimeException("Unrecognized type: " + poi.getPrimitiveCategory());
                        }
                }
            }
        case LIST:
        case MAP:
        case STRUCT:
            {
                // For complex object, serialize to JSON format
                String s = SerDeUtils.getJSONString(o, oi);
                Text t = reuse == null ? new Text() : (Text) reuse;
                // convert to Text and write it
                t.set(s);
                tbOut.write(t);
            }
            // prevent falling through into the default case after a successful write
            break;
        default:
            {
                throw new RuntimeException("Unrecognized type: " + oi.getCategory());
            }
    }
}
Also used : LongObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector) IntObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) Text(org.apache.hadoop.io.Text) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) StringObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector) FloatObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector) ByteObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector) FloatWritable(org.apache.hadoop.io.FloatWritable) DoubleObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector) BooleanWritable(org.apache.hadoop.io.BooleanWritable) ShortObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) LongWritable(org.apache.hadoop.io.LongWritable) BooleanObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector) ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable) IntWritable(org.apache.hadoop.io.IntWritable)
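
Every primitive branch above follows the same shape: cast the PrimitiveObjectInspector to its concrete subtype, unwrap the primitive from the row object, and copy it into a Writable that is reused when possible. Here is a minimal sketch of just the LONG branch, using the stock inspector from PrimitiveObjectInspectorFactory; the helper name toLongWritable is made up for illustration.

import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.LongWritable;

public class LongUnwrapSketch {
    // Mirrors the LONG case of serializeField: unwrap via the inspector,
    // reusing an existing LongWritable when one is supplied.
    static LongWritable toLongWritable(Object o, LongObjectInspector loi, Object reuse) {
        LongWritable r = reuse == null ? new LongWritable() : (LongWritable) reuse;
        r.set(loi.get(o));
        return r;
    }

    public static void main(String[] args) {
        // javaLongObjectInspector inspects plain java.lang.Long values.
        LongObjectInspector loi = PrimitiveObjectInspectorFactory.javaLongObjectInspector;
        System.out.println(toLongWritable(42L, loi, null)); // 42
    }
}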

Example 43 with LongObjectInspector

Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector in project hive by apache.

From the class HBaseRowSerializer, method serialize:

public Writable serialize(Object obj, ObjectInspector objInspector) throws Exception {
    if (objInspector.getCategory() != ObjectInspector.Category.STRUCT) {
        throw new SerDeException(getClass().toString() + " can only serialize struct types, but we got: " + objInspector.getTypeName());
    }
    // Prepare the field ObjectInspectors
    StructObjectInspector soi = (StructObjectInspector) objInspector;
    List<? extends StructField> fields = soi.getAllStructFieldRefs();
    List<Object> values = soi.getStructFieldsDataAsList(obj);
    StructField field = fields.get(keyIndex);
    Object value = values.get(keyIndex);
    byte[] key = keyFactory.serializeKey(value, field);
    if (key == null) {
        throw new SerDeException("HBase row key cannot be NULL");
    }
    long timestamp = putTimestamp;
    if (timestamp < 0 && timestampIndex >= 0) {
        ObjectInspector inspector = fields.get(timestampIndex).getFieldObjectInspector();
        value = values.get(timestampIndex);
        if (inspector instanceof LongObjectInspector) {
            timestamp = ((LongObjectInspector) inspector).get(value);
        } else {
            PrimitiveObjectInspector primitive = (PrimitiveObjectInspector) inspector;
            timestamp = PrimitiveObjectInspectorUtils.getTimestamp(value, primitive).toEpochMilli();
        }
    }
    Put put = timestamp >= 0 ? new Put(key, timestamp) : new Put(key);
    // Serialize each field
    for (int i = 0; i < fields.size(); i++) {
        if (i == keyIndex || i == timestampIndex) {
            continue;
        }
        field = fields.get(i);
        value = values.get(i);
        serializeField(value, field, columnMappings[i], put);
    }
    return new PutWritable(put);
}
Also used : ListObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector) MapObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) LongObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector) Put(org.apache.hadoop.hbase.client.Put) StructField(org.apache.hadoop.hive.serde2.objectinspector.StructField) SerDeException(org.apache.hadoop.hive.serde2.SerDeException)
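
The timestamp handling above is the same fallback that Example 44 uses: a bigint column is read through its LongObjectInspector as epoch millis, anything else goes through PrimitiveObjectInspectorUtils.getTimestamp. A hedged sketch of that shared logic as a standalone helper, assuming a Hive version where getTimestamp returns the hive.common.type.Timestamp with toEpochMilli(), as the snippets above do; the name extractEpochMillis is made up for illustration.

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;

public class TimestampExtractSketch {
    // Same fallback as HBaseRowSerializer.serialize and HiveHBaseInputFormatUtil.getTimestampVal:
    // a bigint column is taken as epoch millis directly, anything else goes through the
    // generic timestamp conversion and is reduced to epoch millis.
    static long extractEpochMillis(ObjectInspector inspector, Object value) {
        if (inspector instanceof LongObjectInspector) {
            return ((LongObjectInspector) inspector).get(value);
        }
        PrimitiveObjectInspector primitive = (PrimitiveObjectInspector) inspector;
        return PrimitiveObjectInspectorUtils.getTimestamp(value, primitive).toEpochMilli();
    }

    public static void main(String[] args) {
        System.out.println(extractEpochMillis(
            PrimitiveObjectInspectorFactory.javaLongObjectInspector, 1700000000000L)); // 1700000000000
    }
}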

Example 44 with LongObjectInspector

Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector in project hive by apache.

From the class HiveHBaseInputFormatUtil, method getTimestampVal:

static long getTimestampVal(IndexSearchCondition sc) throws IOException {
    long timestamp;
    try {
        ExprNodeConstantEvaluator eval = new ExprNodeConstantEvaluator(sc.getConstantDesc());
        ObjectInspector inspector = eval.initialize(null);
        Object value = eval.evaluate(null);
        if (inspector instanceof LongObjectInspector) {
            timestamp = ((LongObjectInspector) inspector).get(value);
        } else {
            PrimitiveObjectInspector primitive = (PrimitiveObjectInspector) inspector;
            timestamp = PrimitiveObjectInspectorUtils.getTimestamp(value, primitive).toEpochMilli();
        }
    } catch (HiveException e) {
        throw new IOException(e);
    }
    return timestamp;
}
Also used : PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) LongObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) ExprNodeConstantEvaluator(org.apache.hadoop.hive.ql.exec.ExprNodeConstantEvaluator) IOException(java.io.IOException)
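
For context, the inspector/value pair consumed above comes from evaluating a constant expression. Below is a hedged sketch of that step in isolation, using a bigint constant in place of sc.getConstantDesc(); the literal value and the use of TypeInfoFactory.longTypeInfo are illustrative assumptions, not taken from the original snippet.

import org.apache.hadoop.hive.ql.exec.ExprNodeConstantEvaluator;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class ConstantTimestampSketch {
    public static void main(String[] args) throws HiveException {
        // A bigint constant standing in for sc.getConstantDesc(); the value is arbitrary.
        ExprNodeConstantDesc constant =
            new ExprNodeConstantDesc(TypeInfoFactory.longTypeInfo, 1700000000000L);
        ExprNodeConstantEvaluator eval = new ExprNodeConstantEvaluator(constant);
        ObjectInspector inspector = eval.initialize(null);
        Object value = eval.evaluate(null);
        if (inspector instanceof LongObjectInspector) {
            // For a bigint constant the inspector is a LongObjectInspector,
            // so the epoch-millis value can be read back directly.
            System.out.println(((LongObjectInspector) inspector).get(value));
        }
    }
}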

Example 45 with LongObjectInspector

Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector in project hive by apache.

From the class DruidSerDe, method serialize:

@Override
public Writable serialize(Object o, ObjectInspector objectInspector) throws SerDeException {
    if (objectInspector.getCategory() != ObjectInspector.Category.STRUCT) {
        throw new SerDeException(getClass().toString() + " can only serialize struct types, but we got: " + objectInspector.getTypeName());
    }
    // Prepare the field ObjectInspectors
    StructObjectInspector soi = (StructObjectInspector) objectInspector;
    List<? extends StructField> fields = soi.getAllStructFieldRefs();
    List<Object> values = soi.getStructFieldsDataAsList(o);
    // Build the output row as a map from column name to value
    final Map<String, Object> value = new HashMap<>();
    for (int i = 0; i < columns.length; i++) {
        if (values.get(i) == null) {
            // null, we just add it
            value.put(columns[i], null);
            continue;
        }
        final Object res;
        switch(types[i].getPrimitiveCategory()) {
            case TIMESTAMP:
                res = ((TimestampObjectInspector) fields.get(i).getFieldObjectInspector()).getPrimitiveJavaObject(values.get(i)).toEpochMilli();
                break;
            case TIMESTAMPLOCALTZ:
                res = ((TimestampLocalTZObjectInspector) fields.get(i).getFieldObjectInspector()).getPrimitiveJavaObject(values.get(i)).getZonedDateTime().toInstant().toEpochMilli();
                break;
            case BYTE:
                res = ((ByteObjectInspector) fields.get(i).getFieldObjectInspector()).get(values.get(i));
                break;
            case SHORT:
                res = ((ShortObjectInspector) fields.get(i).getFieldObjectInspector()).get(values.get(i));
                break;
            case INT:
                res = ((IntObjectInspector) fields.get(i).getFieldObjectInspector()).get(values.get(i));
                break;
            case LONG:
                res = ((LongObjectInspector) fields.get(i).getFieldObjectInspector()).get(values.get(i));
                break;
            case FLOAT:
                res = ((FloatObjectInspector) fields.get(i).getFieldObjectInspector()).get(values.get(i));
                break;
            case DOUBLE:
                res = ((DoubleObjectInspector) fields.get(i).getFieldObjectInspector()).get(values.get(i));
                break;
            case CHAR:
                res = ((HiveCharObjectInspector) fields.get(i).getFieldObjectInspector()).getPrimitiveJavaObject(values.get(i)).getValue();
                break;
            case VARCHAR:
                res = ((HiveVarcharObjectInspector) fields.get(i).getFieldObjectInspector()).getPrimitiveJavaObject(values.get(i)).getValue();
                break;
            case STRING:
                res = ((StringObjectInspector) fields.get(i).getFieldObjectInspector()).getPrimitiveJavaObject(values.get(i));
                break;
            case BOOLEAN:
                res = ((BooleanObjectInspector) fields.get(i).getFieldObjectInspector()).get(values.get(i)) ? 1L : 0L;
                break;
            default:
                throw new SerDeException("Unsupported type: " + types[i].getPrimitiveCategory());
        }
        value.put(columns[i], res);
    }
    // Extract the partition keys: segment granularity and, if present, the shard/partition key.
    // Segment granularity must come first.
    final int granularityFieldIndex = columns.length;
    assert values.size() > granularityFieldIndex;
    Preconditions.checkArgument(fields.get(granularityFieldIndex).getFieldName().equals(Constants.DRUID_TIMESTAMP_GRANULARITY_COL_NAME));
    Timestamp timestamp = ((TimestampObjectInspector) fields.get(granularityFieldIndex).getFieldObjectInspector()).getPrimitiveJavaObject(values.get(granularityFieldIndex));
    Preconditions.checkNotNull(timestamp, "Timestamp column cannot have null value");
    value.put(Constants.DRUID_TIMESTAMP_GRANULARITY_COL_NAME, timestamp.toEpochMilli());
    if (values.size() == columns.length + 2) {
        // Then partition number if any.
        final int partitionNumPos = granularityFieldIndex + 1;
        Preconditions.checkArgument(fields.get(partitionNumPos).getFieldName().equals(Constants.DRUID_SHARD_KEY_COL_NAME), String.format("expecting to encounter %s but was %s", Constants.DRUID_SHARD_KEY_COL_NAME, fields.get(partitionNumPos).getFieldName()));
        value.put(Constants.DRUID_SHARD_KEY_COL_NAME, ((LongObjectInspector) fields.get(partitionNumPos).getFieldObjectInspector()).get(values.get(partitionNumPos)));
    }
    return new DruidWritable(value);
}
Also used : HashMap(java.util.HashMap) TimestampLocalTZObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampLocalTZObjectInspector) Timestamp(org.apache.hadoop.hive.common.type.Timestamp) TimestampObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector) HiveVarcharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector) HiveCharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveCharObjectInspector) BooleanObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector) SerDeException(org.apache.hadoop.hive.serde2.SerDeException) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector)
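
The fields/values traversal used here can be exercised outside a SerDe by building a standard struct ObjectInspector by hand. A hedged sketch that reads a single bigint field the way the LONG branch above does; the field name __time and the class name are made up for illustration.

import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class StructLongFieldSketch {
    public static void main(String[] args) {
        // A one-column struct: __time bigint (column name is illustrative only).
        List<String> names = Arrays.asList("__time");
        List<ObjectInspector> ois =
            Arrays.asList((ObjectInspector) PrimitiveObjectInspectorFactory.javaLongObjectInspector);
        StructObjectInspector soi =
            ObjectInspectorFactory.getStandardStructObjectInspector(names, ois);

        Object row = Arrays.asList((Object) 1700000000000L);
        List<? extends StructField> fields = soi.getAllStructFieldRefs();
        List<Object> values = soi.getStructFieldsDataAsList(row);

        // Same access pattern as the LONG case in DruidSerDe.serialize.
        long millis = ((LongObjectInspector) fields.get(0).getFieldObjectInspector())
            .get(values.get(0));
        System.out.println(millis); // 1700000000000
    }
}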

Aggregations

LongObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector) - 39 usages
PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) - 32 usages
BinaryObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector) - 26 usages
DoubleObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector) - 23 usages
StringObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector) - 21 usages
IntObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector) - 20 usages
ByteObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector) - 18 usages
HiveDecimalObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector) - 18 usages
TimestampObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector) - 18 usages
FloatObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector) - 17 usages
ShortObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector) - 17 usages
BooleanObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector) - 16 usages
DateObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector) - 16 usages
StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) - 14 usages
HiveCharObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveCharObjectInspector) - 13 usages
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) - 12 usages
HiveVarcharObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector) - 12 usages
MapObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector) - 11 usages
ListObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector) - 10 usages
Text (org.apache.hadoop.io.Text) - 10 usages