Example 1 with Type

Use of org.apache.hive.hcatalog.data.schema.HCatFieldSchema.Type in project hive by apache.

From the class HCatBaseStorer, method getJavaObj:

/**
   * Converts a Pig value object to a Hive value object.
   * This method assumes that {@link #validateSchema(org.apache.pig.impl.logicalLayer.schema.Schema.FieldSchema, org.apache.hive.hcatalog.data.schema.HCatFieldSchema, org.apache.pig.impl.logicalLayer.schema.Schema, org.apache.hive.hcatalog.data.schema.HCatSchema, int)},
   * which checks that the types in the Pig schema are compatible with the target Hive table, has
   * already been called.
   */
private Object getJavaObj(Object pigObj, HCatFieldSchema hcatFS) throws HCatException, BackendException {
    try {
        if (pigObj == null)
            return null;
        // The real work-horse. Spend time and energy in this method if there is a
        // need to keep HCatStorer lean and fast.
        Type type = hcatFS.getType();
        switch(type) {
            case BINARY:
                return ((DataByteArray) pigObj).get();
            case STRUCT:
                HCatSchema structSubSchema = hcatFS.getStructSubSchema();
                // Unwrap the tuple.
                List<Object> all = ((Tuple) pigObj).getAll();
                ArrayList<Object> converted = new ArrayList<Object>(all.size());
                for (int i = 0; i < all.size(); i++) {
                    converted.add(getJavaObj(all.get(i), structSubSchema.get(i)));
                }
                return converted;
            case ARRAY:
                // Unwrap the bag.
                DataBag pigBag = (DataBag) pigObj;
                HCatFieldSchema tupFS = hcatFS.getArrayElementSchema().get(0);
                boolean needTuple = tupFS.getType() == Type.STRUCT;
                List<Object> bagContents = new ArrayList<Object>((int) pigBag.size());
                Iterator<Tuple> bagItr = pigBag.iterator();
                while (bagItr.hasNext()) {
                    // If the Hive array element is not a struct, each tuple in the bag is just a
                    // single-field wrapper; throw away the tuple and keep its one field.
                    bagContents.add(getJavaObj(needTuple ? bagItr.next() : bagItr.next().get(0), tupFS));
                }
                return bagContents;
            case MAP:
                Map<?, ?> pigMap = (Map<?, ?>) pigObj;
                Map<Object, Object> typeMap = new HashMap<Object, Object>();
                for (Entry<?, ?> entry : pigMap.entrySet()) {
                    // Schema validation enforces that the key is a String. The value side has a
                    // schema (not a FieldSchema), so take its single element.
                    typeMap.put((String) entry.getKey(),
                        getJavaObj(entry.getValue(), hcatFS.getMapValueSchema().get(0)));
                }
                return typeMap;
            case STRING:
            case INT:
            case BIGINT:
            case FLOAT:
            case DOUBLE:
                return pigObj;
            case SMALLINT:
                if ((Integer) pigObj < Short.MIN_VALUE || (Integer) pigObj > Short.MAX_VALUE) {
                    handleOutOfRangeValue(pigObj, hcatFS);
                    return null;
                }
                return ((Integer) pigObj).shortValue();
            case TINYINT:
                if ((Integer) pigObj < Byte.MIN_VALUE || (Integer) pigObj > Byte.MAX_VALUE) {
                    handleOutOfRangeValue(pigObj, hcatFS);
                    return null;
                }
                return ((Integer) pigObj).byteValue();
            case BOOLEAN:
                if (pigObj instanceof String) {
                    if (((String) pigObj).trim().compareTo("0") == 0) {
                        return Boolean.FALSE;
                    }
                    if (((String) pigObj).trim().compareTo("1") == 0) {
                        return Boolean.TRUE;
                    }
                    throw new BackendException("Unexpected type " + type + " for value " + pigObj + " of class " + pigObj.getClass().getName(), PigHCatUtil.PIG_EXCEPTION_CODE);
                }
                return Boolean.parseBoolean(pigObj.toString());
            case DECIMAL:
                BigDecimal bd = (BigDecimal) pigObj;
                DecimalTypeInfo dti = (DecimalTypeInfo) hcatFS.getTypeInfo();
                if (bd.precision() > dti.precision() || bd.scale() > dti.scale()) {
                    handleOutOfRangeValue(pigObj, hcatFS);
                    return null;
                }
                return HiveDecimal.create(bd);
            case CHAR:
                String charVal = (String) pigObj;
                CharTypeInfo cti = (CharTypeInfo) hcatFS.getTypeInfo();
                if (charVal.length() > cti.getLength()) {
                    handleOutOfRangeValue(pigObj, hcatFS);
                    return null;
                }
                return new HiveChar(charVal, cti.getLength());
            case VARCHAR:
                String varcharVal = (String) pigObj;
                VarcharTypeInfo vti = (VarcharTypeInfo) hcatFS.getTypeInfo();
                if (varcharVal.length() > vti.getLength()) {
                    handleOutOfRangeValue(pigObj, hcatFS);
                    return null;
                }
                return new HiveVarchar(varcharVal, vti.getLength());
            case TIMESTAMP:
                DateTime dt = (DateTime) pigObj;
                //getMillis() returns UTC time regardless of TZ
                return new Timestamp(dt.getMillis());
            case DATE:
                /* We ignore any TZ setting on the Pig value since java.sql.Date doesn't have one
                 * (in any meaningful way). So if the Pig value has a 0 time component (midnight)
                 * we assume it reasonably 'fits' into a Hive DATE; if the time part is not 0,
                 * it's considered out of range for the target type. */
                DateTime dateTime = ((DateTime) pigObj);
                if (dateTime.getMillisOfDay() != 0) {
                    handleOutOfRangeValue(pigObj, hcatFS, "Time component must be 0 (midnight) in local timezone; Local TZ val='" + pigObj + "'");
                    return null;
                }
                /* java.sql.Date is a poorly defined API. Some (all?) SerDes call toString() on it
                 * [e.g. LazySimpleSerDe uses LazyUtils.writePrimitiveUTF8()], which automatically
                 * adjusts for the local timezone. Date.valueOf() also uses the local timezone, as
                 * does Date(int, int, int). See also PigHCatUtil#extractPigObject() for the
                 * corresponding read operation. This way a DATETIME from Pig, when stored into
                 * Hive and read back, comes back with the same value. */
                return new Date(dateTime.getYear() - 1900, dateTime.getMonthOfYear() - 1, dateTime.getDayOfMonth());
            default:
                throw new BackendException("Unexpected HCat type " + type + " for value " + pigObj + " of class " + pigObj.getClass().getName(), PigHCatUtil.PIG_EXCEPTION_CODE);
        }
    } catch (BackendException e) {
        // provide the path to the field in the error message
        throw new BackendException((hcatFS.getName() == null ? " " : hcatFS.getName() + ".") + e.getMessage(), e);
    }
}
Also used:

import java.math.BigDecimal;
import java.sql.Date;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveVarchar;
import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
import org.apache.hive.hcatalog.data.schema.HCatFieldSchema.Type;
import org.apache.hive.hcatalog.data.schema.HCatSchema;
import org.apache.pig.backend.BackendException;
import org.apache.pig.data.DataBag;
import org.apache.pig.data.DataByteArray;
import org.apache.pig.data.DataType;
import org.apache.pig.data.Tuple;
import org.joda.time.DateTime;
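
The DATE branch above encodes a subtle rule: a Pig DATETIME fits a Hive DATE only when its time-of-day is exactly midnight, and the java.sql.Date is then built in the local timezone. Below is a minimal, self-contained sketch of just that rule, using only Joda-Time and java.sql.Date; the class and helper names (DateConversionSketch, pigDateTimeToHiveDate) are hypothetical, not part of HCatalog.

import java.sql.Date;

import org.joda.time.DateTime;

public class DateConversionSketch {

    // Hypothetical helper mirroring the DATE branch of getJavaObj(): a Pig
    // DateTime fits a Hive DATE only if its time-of-day is exactly midnight.
    static Date pigDateTimeToHiveDate(DateTime dt) {
        if (dt.getMillisOfDay() != 0) {
            throw new IllegalArgumentException(
                "Time component must be 0 (midnight) in local timezone; val='" + dt + "'");
        }
        // Deprecated constructor, but it matches the local-timezone semantics
        // described in getJavaObj() (year is 1900-based, month 0-based).
        return new Date(dt.getYear() - 1900, dt.getMonthOfYear() - 1, dt.getDayOfMonth());
    }

    public static void main(String[] args) {
        // Midnight local time converts cleanly...
        System.out.println(pigDateTimeToHiveDate(new DateTime(2024, 3, 15, 0, 0))); // 2024-03-15
        // ...while any non-zero time component is rejected as out of range.
        try {
            pigDateTimeToHiveDate(new DateTime(2024, 3, 15, 10, 30));
        } catch (IllegalArgumentException expected) {
            System.out.println(expected.getMessage());
        }
    }
}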

Example 2 with Type

Use of org.apache.hive.hcatalog.data.schema.HCatFieldSchema.Type in project hive by apache.

From the class HCatBaseStorer, method validateSchema:

/**
   * This method encodes which Pig types can be mapped to (stored in) which HCat types.
   * @throws HCatException
   * @throws FrontendException
   */
private void validateSchema(FieldSchema pigField, HCatFieldSchema hcatField, Schema topLevelPigSchema, HCatSchema topLevelHCatSchema, int columnPos) throws HCatException, FrontendException {
    validateAlias(pigField.alias);
    byte type = pigField.type;
    if (DataType.isComplex(type)) {
        switch(type) {
            case DataType.MAP:
                if (hcatField != null) {
                    if (hcatField.getMapKeyType() != Type.STRING) {
                        throw new FrontendException("Key Type of map must be String " + hcatField, PigHCatUtil.PIG_EXCEPTION_CODE);
                    }
                    // Map values can be primitive or complex; no further check is needed here.
                }
                break;
            case DataType.BAG:
                HCatSchema arrayElementSchema = hcatField == null ? null : hcatField.getArrayElementSchema();
                for (FieldSchema innerField : pigField.schema.getField(0).schema.getFields()) {
                    validateSchema(innerField, getColFromSchema(pigField.alias, arrayElementSchema), topLevelPigSchema, topLevelHCatSchema, columnPos);
                }
                break;
            case DataType.TUPLE:
                HCatSchema structSubSchema = hcatField == null ? null : hcatField.getStructSubSchema();
                for (FieldSchema innerField : pigField.schema.getFields()) {
                    validateSchema(innerField, getColFromSchema(pigField.alias, structSubSchema), topLevelPigSchema, topLevelHCatSchema, columnPos);
                }
                break;
            default:
                throw new FrontendException("Internal Error.", PigHCatUtil.PIG_EXCEPTION_CODE);
        }
    } else if (hcatField != null) {
        // Validate primitives only when we have type info about the target field;
        // without it there is no point trying to validate further.
        switch(type) {
            case DataType.BIGDECIMAL:
                throwTypeMismatchException(type, Lists.newArrayList(Type.DECIMAL), hcatField, columnPos);
                break;
            case DataType.DATETIME:
                throwTypeMismatchException(type, Lists.newArrayList(Type.TIMESTAMP, Type.DATE), hcatField, columnPos);
                break;
            case DataType.BYTEARRAY:
                throwTypeMismatchException(type, Lists.newArrayList(Type.BINARY), hcatField, columnPos);
                break;
            case DataType.BIGINTEGER:
                throwTypeMismatchException(type, Collections.<Type>emptyList(), hcatField, columnPos);
                break;
            case DataType.BOOLEAN:
                throwTypeMismatchException(type, Lists.newArrayList(Type.BOOLEAN), hcatField, columnPos);
                break;
            case DataType.CHARARRAY:
                throwTypeMismatchException(type, Lists.newArrayList(Type.STRING, Type.CHAR, Type.VARCHAR), hcatField, columnPos);
                break;
            case DataType.DOUBLE:
                throwTypeMismatchException(type, Lists.newArrayList(Type.DOUBLE), hcatField, columnPos);
                break;
            case DataType.FLOAT:
                throwTypeMismatchException(type, Lists.newArrayList(Type.FLOAT), hcatField, columnPos);
                break;
            case DataType.INTEGER:
                throwTypeMismatchException(type, Lists.newArrayList(Type.INT, Type.BIGINT, Type.TINYINT, Type.SMALLINT), hcatField, columnPos);
                break;
            case DataType.LONG:
                throwTypeMismatchException(type, Lists.newArrayList(Type.BIGINT), hcatField, columnPos);
                break;
            default:
                throw new FrontendException("'" + type + "' Pig datatype in column " + columnPos + "(0-based) is not supported by HCat", PigHCatUtil.PIG_EXCEPTION_CODE);
        }
    } else {
        // Intentionally disabled; see HIVE-6194.
        if (false) {
            throw new FrontendException("(pigSch,hcatSchema)=(" + pigField + "," + hcatField + ") (topPig, topHcat)=(" + topLevelPigSchema + "," + topLevelHCatSchema + ")");
        }
    }
}
Also used:

import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
import org.apache.hive.hcatalog.data.schema.HCatFieldSchema.Type;
import org.apache.hive.hcatalog.data.schema.HCatSchema;
import org.apache.pig.data.DataType;
import org.apache.pig.impl.logicalLayer.FrontendException;
import org.apache.pig.impl.logicalLayer.schema.Schema.FieldSchema;
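
The second switch above is effectively a compatibility table from Pig primitive types to the HCat types that may store them. As a quick reference, here is a minimal sketch of that table as plain data, transcribed from the cases above; it uses only type names as strings, so it carries no Pig or HCatalog dependencies, and the class name PigToHCatCompatibility is hypothetical.

import java.util.List;
import java.util.Map;

public class PigToHCatCompatibility {

    // HCat types each Pig primitive may be stored into, transcribed from the
    // validateSchema() switch above (requires Java 9+ for Map.of/List.of).
    static final Map<String, List<String>> COMPATIBLE = Map.of(
        "BIGDECIMAL", List.of("DECIMAL"),
        "DATETIME", List.of("TIMESTAMP", "DATE"),
        "BYTEARRAY", List.of("BINARY"),
        "BIGINTEGER", List.of(), // no compatible HCat type
        "BOOLEAN", List.of("BOOLEAN"),
        "CHARARRAY", List.of("STRING", "CHAR", "VARCHAR"),
        "DOUBLE", List.of("DOUBLE"),
        "FLOAT", List.of("FLOAT"),
        "INTEGER", List.of("INT", "BIGINT", "TINYINT", "SMALLINT"),
        "LONG", List.of("BIGINT"));

    public static void main(String[] args) {
        System.out.println(COMPATIBLE.get("CHARARRAY")); // [STRING, CHAR, VARCHAR]
        System.out.println(COMPATIBLE.get("BIGINTEGER")); // []
    }
}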

Example 3 with Type

Use of org.apache.hive.hcatalog.data.schema.HCatFieldSchema.Type in project hive by apache.

From the class PigHCatUtil, method extractPigObject:

/**
   * Converts an object from Hive's value system to Pig's value system;
   * see HCatBaseStorer#getJavaObj() for the Pig->Hive conversion.
   * @param o object from the Hive value system
   * @return the object in the Pig value system
   */
public static Object extractPigObject(Object o, HCatFieldSchema hfs) throws Exception {
    /* Note that HCatRecordSerDe.serializePrimitiveField() will be called before this, thus some
     * type promotion/conversion may occur, e.g. Short to Integer. We should refactor this so
     * that it's happening in one place per module/product that we are integrating with.
     * All Pig conversion should be done here, etc. */
    if (o == null) {
        return null;
    }
    Object result;
    Type itemType = hfs.getType();
    switch(itemType) {
        case BINARY:
            result = new DataByteArray((byte[]) o);
            break;
        case STRUCT:
            result = transformToTuple((List<?>) o, hfs);
            break;
        case ARRAY:
            result = transformToBag((List<?>) o, hfs);
            break;
        case MAP:
            result = transformToPigMap((Map<?, ?>) o, hfs);
            break;
        case DECIMAL:
            result = ((HiveDecimal) o).bigDecimalValue();
            break;
        case CHAR:
            result = ((HiveChar) o).getValue();
            break;
        case VARCHAR:
            result = ((HiveVarchar) o).getValue();
            break;
        case DATE:
            /* java.sql.Date is weird. It automatically adjusts its millis value to be in the
             * local TZ, e.g. new java.sql.Date(System.currentTimeMillis()).toString(); if you do
             * this just after midnight in Palo Alto, you'll get yesterday's date printed out. */
            Date d = (Date) o;
            //uses local TZ
            result = new DateTime(d.getYear() + 1900, d.getMonth() + 1, d.getDate(), 0, 0);
            break;
        case TIMESTAMP:
            /* DATA TRUNCATION!!!
             * Timestamp may have nanos; we'll strip those away and create a Joda DateTime object
             * in the local TZ. This is arbitrary, since the Hive value doesn't have any TZ
             * notion, but we need to set something for the TZ.
             * Timestamp is consistently in GMT (unless you call toString() on it), so we use
             * millis. */
            //uses local TZ
            result = new DateTime(((Timestamp) o).getTime());
            break;
        default:
            result = o;
            break;
    }
    return result;
}
Also used:

import java.sql.Date;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.hive.hcatalog.data.schema.HCatFieldSchema.Type;
import org.apache.pig.data.DataByteArray;
import org.apache.pig.data.DataType;
import org.joda.time.DateTime;
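
To make the TIMESTAMP branch's documented data truncation concrete, here is a small, self-contained sketch using only java.sql.Timestamp and Joda-Time (the class name TimestampTruncationSketch is hypothetical): getTime() carries millisecond precision only, so sub-millisecond nanos are silently dropped on the way into the Joda DateTime.

import java.sql.Timestamp;

import org.joda.time.DateTime;

public class TimestampTruncationSketch {

    public static void main(String[] args) {
        // A Hive-style timestamp with nanosecond precision.
        Timestamp ts = Timestamp.valueOf("2024-03-15 10:30:00.123456789");

        // getTime() returns epoch millis, so everything below the millisecond
        // (the trailing ...456789) is lost, exactly as the comment above warns.
        DateTime dt = new DateTime(ts.getTime()); // interpreted in the local TZ

        System.out.println(ts.getNanos()); // 123456789
        System.out.println(dt.getMillisOfSecond()); // 123
    }
}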

Aggregations

Type (org.apache.hive.hcatalog.data.schema.HCatFieldSchema.Type) 3
DataType (org.apache.pig.data.DataType) 3
Date (java.sql.Date) 2
Timestamp (java.sql.Timestamp) 2
ArrayList (java.util.ArrayList) 2
HashMap (java.util.HashMap) 2
Map (java.util.Map) 2
HCatFieldSchema (org.apache.hive.hcatalog.data.schema.HCatFieldSchema) 2
HCatSchema (org.apache.hive.hcatalog.data.schema.HCatSchema) 2
DataByteArray (org.apache.pig.data.DataByteArray) 2
DateTime (org.joda.time.DateTime) 2
BigDecimal (java.math.BigDecimal) 1
List (java.util.List) 1
HiveChar (org.apache.hadoop.hive.common.type.HiveChar) 1
HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar) 1
CharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo) 1
DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) 1
VarcharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) 1
BackendException (org.apache.pig.backend.BackendException) 1
DataBag (org.apache.pig.data.DataBag) 1