
Example 11 with Timestamp

Use of java.sql.Timestamp in project sharding-jdbc by dangdangdotcom.

From class PreparedStatementAdapterTest, method assertSetTimestamp.

@Test
public void assertSetTimestamp() throws SQLException {
    Timestamp now = new Timestamp(0L);
    // Exercise both JDBC overloads: with and without an explicit Calendar.
    actual.setTimestamp(1, now);
    actual.setTimestamp(2, now, Calendar.getInstance());
    // Both indexes should have recorded the same Timestamp value.
    assertParameter(actual, 1, now);
    assertParameter(actual, 2, now);
}
Also used: java.sql.Timestamp, com.dangdang.ddframe.rdb.integrate.db.AbstractShardingDataBasesOnlyDBUnitTest, org.junit.Test
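
assertParameter is a helper from the test's superclass; it presumably inspects the parameter value the adapter recorded at each index. A minimal, self-contained sketch of that recording idea, with invented names rather than sharding-jdbc's actual API:

import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.List;

// Hypothetical sketch: a statement adapter that records setTimestamp arguments
// so a test like the one above can verify what was passed at each index.
public class RecordingParameters {

    private final List<Object> parameters = new ArrayList<>();

    public void setTimestamp(int index, Timestamp value) {
        setParameter(index, value);
    }

    public void setTimestamp(int index, Timestamp value, Calendar cal) {
        // In a real driver the Calendar only affects time-zone conversion;
        // the recorded parameter is still the Timestamp itself.
        setParameter(index, value);
    }

    private void setParameter(int index, Object value) {
        // JDBC parameter indexes are 1-based.
        while (parameters.size() < index) {
            parameters.add(null);
        }
        parameters.set(index - 1, value);
    }

    public Object getParameter(int index) {
        return parameters.get(index - 1);
    }

    public static void main(String[] args) {
        RecordingParameters actual = new RecordingParameters();
        Timestamp now = new Timestamp(0L);
        actual.setTimestamp(1, now);
        actual.setTimestamp(2, now, Calendar.getInstance());
        System.out.println(now.equals(actual.getParameter(1)));  // true
        System.out.println(now.equals(actual.getParameter(2)));  // true
    }
}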

Example 12 with Timestamp

Use of java.sql.Timestamp in project hive by apache.

From class GenericUDFDateDiff, method convertToDate.

private Date convertToDate(PrimitiveCategory inputType, Converter converter, DeferredObject argument) throws HiveException {
    assert (converter != null);
    assert (argument != null);
    if (argument.get() == null) {
        return null;
    }
    Date date = new Date(0);
    switch(inputType) {
        case STRING:
        case VARCHAR:
        case CHAR:
            // Parse textual input; any unparseable string becomes SQL NULL below.
            String dateString = converter.convert(argument.get()).toString();
            try {
                date.setTime(formatter.parse(dateString).getTime());
            } catch (ParseException e) {
                return null;
            }
            break;
        case TIMESTAMP:
            // Reuse the timestamp's millisecond instant for the Date.
            Timestamp ts = ((TimestampWritable) converter.convert(argument.get())).getTimestamp();
            date.setTime(ts.getTime());
            break;
        case DATE:
            DateWritable dw = (DateWritable) converter.convert(argument.get());
            date = dw.get();
            break;
        default:
            throw new UDFArgumentException("TO_DATE() only takes STRING/TIMESTAMP/DATEWRITABLE types, got " + inputType);
    }
    return date;
}
Also used: org.apache.hadoop.hive.ql.exec.UDFArgumentException, org.apache.hadoop.hive.serde2.io.DateWritable, org.apache.hadoop.hive.serde2.io.TimestampWritable, java.text.ParseException, java.sql.Timestamp, java.sql.Date
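
The same conversion logic can be sketched without the Hive wrapper types: parse strings leniently and return null on failure, and reuse a timestamp's millisecond value directly. All names below are illustrative, not Hive's API, and the yyyy-MM-dd pattern is an assumption (the UDF's formatter field is configured elsewhere in the class):

import java.sql.Date;
import java.sql.Timestamp;
import java.text.ParseException;
import java.text.SimpleDateFormat;

// Standalone sketch of convertToDate's branches, minus the ObjectInspector
// machinery. The date pattern is assumed for illustration.
public class ToDateSketch {

    private final SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");

    Date fromString(String s) {
        try {
            return new Date(formatter.parse(s).getTime());
        } catch (ParseException e) {
            return null; // mirrors the UDF: unparseable input becomes SQL NULL
        }
    }

    Date fromTimestamp(Timestamp ts) {
        return new Date(ts.getTime()); // keep the instant, render it as a DATE
    }

    public static void main(String[] args) {
        ToDateSketch sketch = new ToDateSketch();
        System.out.println(sketch.fromString("2014-01-05"));          // 2014-01-05
        System.out.println(sketch.fromString("not a date"));          // null
        System.out.println(sketch.fromTimestamp(new Timestamp(0L))); // epoch, local zone
    }
}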

Example 13 with Timestamp

Use of java.sql.Timestamp in project hive by apache.

From class TestDefaultHCatRecord, method getHCat13TypesComplexRecord.

private static HCatRecord getHCat13TypesComplexRecord() {
    List<Object> rec_hcat13ComplexTypes = new ArrayList<Object>();
    Map<HiveDecimal, String> m = new HashMap<HiveDecimal, String>();
    m.put(HiveDecimal.create(new BigDecimal("1234.12")), "1234.12");
    m.put(HiveDecimal.create(new BigDecimal("1234.13")), "1234.13");
    rec_hcat13ComplexTypes.add(m);
    // hcat13 refers to the types added in Hive 0.13, e.g. timestamp and decimal.
    Map<Timestamp, List<Object>> m2 = new HashMap<Timestamp, List<Object>>();
    List<Object> list = new ArrayList<Object>();
    list.add(Date.valueOf("2014-01-05"));
    list.add(new HashMap<HiveDecimal, String>(m));
    m2.put(new Timestamp(System.currentTimeMillis()), list);
    rec_hcat13ComplexTypes.add(m2);
    return new DefaultHCatRecord(rec_hcat13ComplexTypes);
}
Also used: java.util.HashMap, java.util.ArrayList, java.sql.Timestamp, java.math.BigDecimal, org.apache.hadoop.hive.common.type.HiveDecimal, java.util.List
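
Stripped of the HCatalog wrapper, the record is just nested Java collections. A runnable sketch of the same shape, substituting BigDecimal for HiveDecimal so it needs no Hive dependency:

import java.math.BigDecimal;
import java.sql.Date;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Sketch of the nested structure above; BigDecimal stands in for HiveDecimal.
public class ComplexRecordSketch {
    public static void main(String[] args) {
        List<Object> record = new ArrayList<>();

        Map<BigDecimal, String> decimals = new HashMap<>();
        decimals.put(new BigDecimal("1234.12"), "1234.12");
        record.add(decimals);

        List<Object> values = new ArrayList<>();
        values.add(Date.valueOf("2014-01-05"));
        values.add(new HashMap<>(decimals));

        // Timestamp works as a HashMap key, but note that its hashCode()
        // ignores the nanos field while equals() compares it.
        Map<Timestamp, List<Object>> byTime = new HashMap<>();
        byTime.put(new Timestamp(System.currentTimeMillis()), values);
        record.add(byTime);

        System.out.println(record);
    }
}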

Example 14 with Timestamp

Use of java.sql.Timestamp in project hive by apache.

From class VectorizedBatchUtil, method debugFormatOneRow.

public static StringBuilder debugFormatOneRow(VectorizedRowBatch batch, int index, String prefix, StringBuilder sb) {
    sb.append(prefix + " row " + index + " ");
    for (int p = 0; p < batch.projectionSize; p++) {
        int column = batch.projectedColumns[p];
        sb.append("(" + p + "," + column + ") ");
        ColumnVector colVector = batch.cols[column];
        if (colVector == null) {
            sb.append("(null ColumnVector)");
        } else {
            boolean isRepeating = colVector.isRepeating;
            if (isRepeating) {
                sb.append("(repeating)");
            }
            // A repeating vector stores its single value at entry 0.
            index = (isRepeating ? 0 : index);
            if (colVector.noNulls || !colVector.isNull[index]) {
                if (colVector instanceof LongColumnVector) {
                    sb.append(((LongColumnVector) colVector).vector[index]);
                } else if (colVector instanceof DoubleColumnVector) {
                    sb.append(((DoubleColumnVector) colVector).vector[index]);
                } else if (colVector instanceof BytesColumnVector) {
                    BytesColumnVector bytesColumnVector = (BytesColumnVector) colVector;
                    byte[] bytes = bytesColumnVector.vector[index];
                    int start = bytesColumnVector.start[index];
                    int length = bytesColumnVector.length[index];
                    if (bytes == null) {
                        sb.append("(Unexpected null bytes with start " + start + " length " + length + ")");
                    } else {
                        sb.append("bytes: '" + displayBytes(bytes, start, length) + "'");
                    }
                } else if (colVector instanceof DecimalColumnVector) {
                    sb.append(((DecimalColumnVector) colVector).vector[index].toString());
                } else if (colVector instanceof TimestampColumnVector) {
                    Timestamp timestamp = new Timestamp(0);
                    ((TimestampColumnVector) colVector).timestampUpdate(timestamp, index);
                    sb.append(timestamp.toString());
                } else if (colVector instanceof IntervalDayTimeColumnVector) {
                    HiveIntervalDayTime intervalDayTime = ((IntervalDayTimeColumnVector) colVector).asScratchIntervalDayTime(index);
                    sb.append(intervalDayTime.toString());
                } else {
                    sb.append("Unknown");
                }
            } else {
                sb.append("NULL");
            }
        }
        sb.append(" ");
    }
    return sb;
}
Also used: java.sql.Timestamp, org.apache.hadoop.hive.common.type.HiveIntervalDayTime
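
The TimestampColumnVector branch shows a scratch-object pattern: one Timestamp is allocated and then updated per row, avoiding a new object for every value printed. A simplified, hypothetical stand-in for the column vector (not Hive's real class) that demonstrates the same idea:

import java.sql.Timestamp;

// Simplified stand-in for a timestamp column: parallel millis/nanos arrays
// plus an update method that refreshes a caller-owned scratch Timestamp.
public class ScratchTimestampSketch {

    private final long[] time;   // milliseconds since the epoch, per row
    private final int[] nanos;   // nanosecond-of-second, per row

    public ScratchTimestampSketch(long[] time, int[] nanos) {
        this.time = time;
        this.nanos = nanos;
    }

    public void timestampUpdate(Timestamp scratch, int row) {
        scratch.setTime(time[row]);   // sets the integral part (and resets nanos)
        scratch.setNanos(nanos[row]); // then restores the full nanosecond field
    }

    public static void main(String[] args) {
        ScratchTimestampSketch col = new ScratchTimestampSketch(
                new long[] { 0L, 86_400_000L }, new int[] { 0, 123_456_789 });
        Timestamp scratch = new Timestamp(0L); // one allocation, reused per row
        for (int row = 0; row < 2; row++) {
            col.timestampUpdate(scratch, row);
            System.out.println("row " + row + ": " + scratch);
        }
    }
}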

Example 15 with Timestamp

Use of java.sql.Timestamp in project hive by apache.

From class VectorizedRowBatchCtx, method addPartitionColsToBatch.

/**
   * Add the partition values to the batch.
   *
   * @param batch the batch whose partition columns are filled
   * @param partitionValues one value per partition column, in column order
   */
public void addPartitionColsToBatch(VectorizedRowBatch batch, Object[] partitionValues) {
    if (partitionValues != null) {
        for (int i = 0; i < partitionColumnCount; i++) {
            Object value = partitionValues[i];
            int colIndex = dataColumnCount + i;
            String partitionColumnName = rowColumnNames[colIndex];
            PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) rowColumnTypeInfos[colIndex];
            switch(primitiveTypeInfo.getPrimitiveCategory()) {
                case BOOLEAN:
                    {
                        LongColumnVector lcv = (LongColumnVector) batch.cols[colIndex];
                        if (value == null) {
                            lcv.noNulls = false;
                            lcv.isNull[0] = true;
                            lcv.isRepeating = true;
                        } else {
                            lcv.fill((Boolean) value ? 1 : 0);
                            lcv.isNull[0] = false;
                        }
                    }
                    break;
                case BYTE:
                    {
                        LongColumnVector lcv = (LongColumnVector) batch.cols[colIndex];
                        if (value == null) {
                            lcv.noNulls = false;
                            lcv.isNull[0] = true;
                            lcv.isRepeating = true;
                        } else {
                            lcv.fill((Byte) value);
                            lcv.isNull[0] = false;
                        }
                    }
                    break;
                case SHORT:
                    {
                        LongColumnVector lcv = (LongColumnVector) batch.cols[colIndex];
                        if (value == null) {
                            lcv.noNulls = false;
                            lcv.isNull[0] = true;
                            lcv.isRepeating = true;
                        } else {
                            lcv.fill((Short) value);
                            lcv.isNull[0] = false;
                        }
                    }
                    break;
                case INT:
                    {
                        LongColumnVector lcv = (LongColumnVector) batch.cols[colIndex];
                        if (value == null) {
                            lcv.noNulls = false;
                            lcv.isNull[0] = true;
                            lcv.isRepeating = true;
                        } else {
                            lcv.fill((Integer) value);
                            lcv.isNull[0] = false;
                        }
                    }
                    break;
                case LONG:
                    {
                        LongColumnVector lcv = (LongColumnVector) batch.cols[colIndex];
                        if (value == null) {
                            lcv.noNulls = false;
                            lcv.isNull[0] = true;
                            lcv.isRepeating = true;
                        } else {
                            lcv.fill((Long) value);
                            lcv.isNull[0] = false;
                        }
                    }
                    break;
                case DATE:
                    {
                        LongColumnVector lcv = (LongColumnVector) batch.cols[colIndex];
                        if (value == null) {
                            lcv.noNulls = false;
                            lcv.isNull[0] = true;
                            lcv.isRepeating = true;
                        } else {
                            lcv.fill(DateWritable.dateToDays((Date) value));
                            lcv.isNull[0] = false;
                        }
                    }
                    break;
                case TIMESTAMP:
                    {
                        TimestampColumnVector lcv = (TimestampColumnVector) batch.cols[colIndex];
                        if (value == null) {
                            lcv.noNulls = false;
                            lcv.isNull[0] = true;
                            lcv.isRepeating = true;
                        } else {
                            lcv.fill((Timestamp) value);
                            lcv.isNull[0] = false;
                        }
                    }
                    break;
                case INTERVAL_YEAR_MONTH:
                    {
                        LongColumnVector lcv = (LongColumnVector) batch.cols[colIndex];
                        if (value == null) {
                            lcv.noNulls = false;
                            lcv.isNull[0] = true;
                            lcv.isRepeating = true;
                        } else {
                            lcv.fill(((HiveIntervalYearMonth) value).getTotalMonths());
                            lcv.isNull[0] = false;
                        }
                    }
                    break;
                case INTERVAL_DAY_TIME:
                    {
                        IntervalDayTimeColumnVector icv = (IntervalDayTimeColumnVector) batch.cols[colIndex];
                        if (value == null) {
                            icv.noNulls = false;
                            icv.isNull[0] = true;
                            icv.isRepeating = true;
                        } else {
                            icv.fill(((HiveIntervalDayTime) value));
                            icv.isNull[0] = false;
                        }
                    }
                    break;
                case FLOAT:
                    {
                        DoubleColumnVector dcv = (DoubleColumnVector) batch.cols[colIndex];
                        if (value == null) {
                            dcv.noNulls = false;
                            dcv.isNull[0] = true;
                            dcv.isRepeating = true;
                        } else {
                            dcv.fill((Float) value);
                            dcv.isNull[0] = false;
                        }
                    }
                    break;
                case DOUBLE:
                    {
                        DoubleColumnVector dcv = (DoubleColumnVector) batch.cols[colIndex];
                        if (value == null) {
                            dcv.noNulls = false;
                            dcv.isNull[0] = true;
                            dcv.isRepeating = true;
                        } else {
                            dcv.fill((Double) value);
                            dcv.isNull[0] = false;
                        }
                    }
                    break;
                case DECIMAL:
                    {
                        DecimalColumnVector dv = (DecimalColumnVector) batch.cols[colIndex];
                        if (value == null) {
                            dv.noNulls = false;
                            dv.isNull[0] = true;
                            dv.isRepeating = true;
                        } else {
                            HiveDecimal hd = (HiveDecimal) value;
                            dv.set(0, hd);
                            dv.isRepeating = true;
                            dv.isNull[0] = false;
                        }
                    }
                    break;
                case BINARY:
                    {
                        BytesColumnVector bcv = (BytesColumnVector) batch.cols[colIndex];
                        byte[] bytes = (byte[]) value;
                        if (bytes == null) {
                            bcv.noNulls = false;
                            bcv.isNull[0] = true;
                            bcv.isRepeating = true;
                        } else {
                            bcv.fill(bytes);
                            bcv.isNull[0] = false;
                        }
                    }
                    break;
                case STRING:
                case CHAR:
                case VARCHAR:
                    {
                        BytesColumnVector bcv = (BytesColumnVector) batch.cols[colIndex];
                        if (value == null) {
                            bcv.noNulls = false;
                            bcv.isNull[0] = true;
                            bcv.isRepeating = true;
                        } else {
                            // Do the toString() conversion only after the null check;
                            // calling value.toString() on null would throw.
                            String sVal = value.toString();
                            bcv.setVal(0, sVal.getBytes());
                            bcv.isNull[0] = false;
                            bcv.isRepeating = true;
                        }
                    }
                    break;
                default:
                    throw new RuntimeException("Unable to recognize the partition type " + primitiveTypeInfo.getPrimitiveCategory() + " for column " + partitionColumnName);
            }
        }
    }
}
Also used: java.sql.Timestamp, org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo, org.apache.hadoop.hive.common.type.HiveIntervalYearMonth, org.apache.hadoop.hive.common.type.HiveDecimal
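
Every case above follows the same template: a partition column has a single value for the whole batch, so the column is marked repeating and entry 0 is either flagged NULL or filled with the one value. A compact, hypothetical sketch of that template for a millisecond-valued column (not Hive's ColumnVector API):

import java.sql.Timestamp;

// Hypothetical sketch of the repeating-column template used above.
public class PartitionColumnSketch {

    boolean noNulls = true;
    boolean isRepeating = false;
    final boolean[] isNull = new boolean[1];
    final long[] vector = new long[1];

    void fill(Timestamp value) {
        isRepeating = true; // entry 0 stands for every row in the batch
        if (value == null) {
            noNulls = false;
            isNull[0] = true;
        } else {
            vector[0] = value.getTime(); // millis only; Hive also keeps nanos
            isNull[0] = false;
        }
    }

    public static void main(String[] args) {
        PartitionColumnSketch col = new PartitionColumnSketch();
        col.fill(new Timestamp(0L));
        System.out.println(col.isRepeating + " " + col.vector[0]); // true 0
        col.fill(null);
        System.out.println(col.noNulls + " " + col.isNull[0]);     // false true
    }
}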

Aggregations

Timestamp (java.sql.Timestamp): 3153
Test (org.junit.Test): 526
Date (java.util.Date): 458
PreparedStatement (java.sql.PreparedStatement): 450
SQLException (java.sql.SQLException): 367
ResultSet (java.sql.ResultSet): 353
BigDecimal (java.math.BigDecimal): 351
ArrayList (java.util.ArrayList): 236
Date (java.sql.Date): 218
Connection (java.sql.Connection): 216
HashMap (java.util.HashMap): 201
GenericValue (org.apache.ofbiz.entity.GenericValue): 194
Calendar (java.util.Calendar): 184
Time (java.sql.Time): 173
GenericEntityException (org.apache.ofbiz.entity.GenericEntityException): 167
Delegator (org.apache.ofbiz.entity.Delegator): 157
SimpleDateFormat (java.text.SimpleDateFormat): 150
IOException (java.io.IOException): 129
Locale (java.util.Locale): 129
Map (java.util.Map): 111