
Example 11 with Decimal

use of org.apache.hadoop.hive.metastore.api.Decimal in project hive by apache.

the class StatObjectConverter method fillColumnStatisticsData.

public static void fillColumnStatisticsData(String colType, ColumnStatisticsData data, Object llow, Object lhigh, Object dlow, Object dhigh, Object declow, Object dechigh, Object nulls, Object dist, Object avglen, Object maxlen, Object trues, Object falses, Object avgLong, Object avgDouble, Object avgDecimal, Object sumDist, boolean useDensityFunctionForNDVEstimation) throws MetaException {
    colType = colType.toLowerCase();
    if (colType.equals("boolean")) {
        BooleanColumnStatsData boolStats = new BooleanColumnStatsData();
        boolStats.setNumFalses(MetaStoreDirectSql.extractSqlLong(falses));
        boolStats.setNumTrues(MetaStoreDirectSql.extractSqlLong(trues));
        boolStats.setNumNulls(MetaStoreDirectSql.extractSqlLong(nulls));
        data.setBooleanStats(boolStats);
    } else if (colType.equals("string") || colType.startsWith("varchar") || colType.startsWith("char")) {
        StringColumnStatsData stringStats = new StringColumnStatsData();
        stringStats.setNumNulls(MetaStoreDirectSql.extractSqlLong(nulls));
        stringStats.setAvgColLen(MetaStoreDirectSql.extractSqlDouble(avglen));
        stringStats.setMaxColLen(MetaStoreDirectSql.extractSqlLong(maxlen));
        stringStats.setNumDVs(MetaStoreDirectSql.extractSqlLong(dist));
        data.setStringStats(stringStats);
    } else if (colType.equals("binary")) {
        BinaryColumnStatsData binaryStats = new BinaryColumnStatsData();
        binaryStats.setNumNulls(MetaStoreDirectSql.extractSqlLong(nulls));
        binaryStats.setAvgColLen(MetaStoreDirectSql.extractSqlDouble(avglen));
        binaryStats.setMaxColLen(MetaStoreDirectSql.extractSqlLong(maxlen));
        data.setBinaryStats(binaryStats);
    } else if (colType.equals("bigint") || colType.equals("int") || colType.equals("smallint") || colType.equals("tinyint") || colType.equals("timestamp")) {
        LongColumnStatsData longStats = new LongColumnStatsData();
        longStats.setNumNulls(MetaStoreDirectSql.extractSqlLong(nulls));
        if (lhigh != null) {
            longStats.setHighValue(MetaStoreDirectSql.extractSqlLong(lhigh));
        }
        if (llow != null) {
            longStats.setLowValue(MetaStoreDirectSql.extractSqlLong(llow));
        }
        long lowerBound = MetaStoreDirectSql.extractSqlLong(dist);
        long higherBound = MetaStoreDirectSql.extractSqlLong(sumDist);
        if (useDensityFunctionForNDVEstimation && lhigh != null && llow != null && avgLong != null && MetaStoreDirectSql.extractSqlDouble(avgLong) != 0.0) {
            // We have estimation, lowerbound and higherbound. We use estimation if
            // it is between lowerbound and higherbound.
            long estimation = MetaStoreDirectSql.extractSqlLong((MetaStoreDirectSql.extractSqlLong(lhigh) - MetaStoreDirectSql.extractSqlLong(llow)) / MetaStoreDirectSql.extractSqlDouble(avgLong));
            if (estimation < lowerBound) {
                longStats.setNumDVs(lowerBound);
            } else if (estimation > higherBound) {
                longStats.setNumDVs(higherBound);
            } else {
                longStats.setNumDVs(estimation);
            }
        } else {
            longStats.setNumDVs(lowerBound);
        }
        data.setLongStats(longStats);
    } else if (colType.equals("date")) {
        DateColumnStatsData dateStats = new DateColumnStatsData();
        dateStats.setNumNulls(MetaStoreDirectSql.extractSqlLong(nulls));
        if (lhigh != null) {
            dateStats.setHighValue(new Date(MetaStoreDirectSql.extractSqlLong(lhigh)));
        }
        if (llow != null) {
            dateStats.setLowValue(new Date(MetaStoreDirectSql.extractSqlLong(llow)));
        }
        long lowerBound = MetaStoreDirectSql.extractSqlLong(dist);
        long higherBound = MetaStoreDirectSql.extractSqlLong(sumDist);
        if (useDensityFunctionForNDVEstimation && lhigh != null && llow != null && avgLong != null && MetaStoreDirectSql.extractSqlDouble(avgLong) != 0.0) {
            // We have estimation, lowerbound and higherbound. We use estimation if
            // it is between lowerbound and higherbound.
            long estimation = MetaStoreDirectSql.extractSqlLong((MetaStoreDirectSql.extractSqlLong(lhigh) - MetaStoreDirectSql.extractSqlLong(llow)) / MetaStoreDirectSql.extractSqlDouble(avgLong));
            if (estimation < lowerBound) {
                dateStats.setNumDVs(lowerBound);
            } else if (estimation > higherBound) {
                dateStats.setNumDVs(higherBound);
            } else {
                dateStats.setNumDVs(estimation);
            }
        } else {
            dateStats.setNumDVs(lowerBound);
        }
        data.setDateStats(dateStats);
    } else if (colType.equals("double") || colType.equals("float")) {
        DoubleColumnStatsData doubleStats = new DoubleColumnStatsData();
        doubleStats.setNumNulls(MetaStoreDirectSql.extractSqlLong(nulls));
        if (dhigh != null) {
            doubleStats.setHighValue(MetaStoreDirectSql.extractSqlDouble(dhigh));
        }
        if (dlow != null) {
            doubleStats.setLowValue(MetaStoreDirectSql.extractSqlDouble(dlow));
        }
        long lowerBound = MetaStoreDirectSql.extractSqlLong(dist);
        long higherBound = MetaStoreDirectSql.extractSqlLong(sumDist);
        if (useDensityFunctionForNDVEstimation && dhigh != null && dlow != null && avgDouble != null && MetaStoreDirectSql.extractSqlDouble(avgDouble) != 0.0) {
            long estimation = MetaStoreDirectSql.extractSqlLong((MetaStoreDirectSql.extractSqlLong(dhigh) - MetaStoreDirectSql.extractSqlLong(dlow)) / MetaStoreDirectSql.extractSqlDouble(avgDouble));
            if (estimation < lowerBound) {
                doubleStats.setNumDVs(lowerBound);
            } else if (estimation > higherBound) {
                doubleStats.setNumDVs(higherBound);
            } else {
                doubleStats.setNumDVs(estimation);
            }
        } else {
            doubleStats.setNumDVs(lowerBound);
        }
        data.setDoubleStats(doubleStats);
    } else if (colType.startsWith("decimal")) {
        DecimalColumnStatsData decimalStats = new DecimalColumnStatsData();
        decimalStats.setNumNulls(MetaStoreDirectSql.extractSqlLong(nulls));
        Decimal low = null;
        Decimal high = null;
        BigDecimal blow = null;
        BigDecimal bhigh = null;
        if (dechigh instanceof BigDecimal) {
            bhigh = (BigDecimal) dechigh;
            high = new Decimal(ByteBuffer.wrap(bhigh.unscaledValue().toByteArray()), (short) bhigh.scale());
        } else if (dechigh instanceof String) {
            bhigh = new BigDecimal((String) dechigh);
            high = createThriftDecimal((String) dechigh);
        }
        decimalStats.setHighValue(high);
        if (declow instanceof BigDecimal) {
            blow = (BigDecimal) declow;
            low = new Decimal(ByteBuffer.wrap(blow.unscaledValue().toByteArray()), (short) blow.scale());
        } else if (declow instanceof String) {
            blow = new BigDecimal((String) declow);
            low = createThriftDecimal((String) declow);
        }
        decimalStats.setLowValue(low);
        long lowerBound = MetaStoreDirectSql.extractSqlLong(dist);
        long higherBound = MetaStoreDirectSql.extractSqlLong(sumDist);
        if (useDensityFunctionForNDVEstimation && dechigh != null && declow != null && avgDecimal != null && MetaStoreDirectSql.extractSqlDouble(avgDecimal) != 0.0) {
            long estimation = MetaStoreDirectSql.extractSqlLong(MetaStoreDirectSql.extractSqlLong(bhigh.subtract(blow).floatValue() / MetaStoreDirectSql.extractSqlDouble(avgDecimal)));
            if (estimation < lowerBound) {
                decimalStats.setNumDVs(lowerBound);
            } else if (estimation > higherBound) {
                decimalStats.setNumDVs(higherBound);
            } else {
                decimalStats.setNumDVs(estimation);
            }
        } else {
            decimalStats.setNumDVs(lowerBound);
        }
        data.setDecimalStats(decimalStats);
    }
}
Also used : BooleanColumnStatsData(org.apache.hadoop.hive.metastore.api.BooleanColumnStatsData) DoubleColumnStatsData(org.apache.hadoop.hive.metastore.api.DoubleColumnStatsData) DecimalColumnStatsData(org.apache.hadoop.hive.metastore.api.DecimalColumnStatsData) DateColumnStatsData(org.apache.hadoop.hive.metastore.api.DateColumnStatsData) Decimal(org.apache.hadoop.hive.metastore.api.Decimal) BigDecimal(java.math.BigDecimal) StringColumnStatsData(org.apache.hadoop.hive.metastore.api.StringColumnStatsData) LongColumnStatsData(org.apache.hadoop.hive.metastore.api.LongColumnStatsData) BinaryColumnStatsData(org.apache.hadoop.hive.metastore.api.BinaryColumnStatsData) Date(org.apache.hadoop.hive.metastore.api.Date)
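The long, date, double, and decimal branches above all end with the same clamp: the density-based estimate ((high - low) / average) is used only when it falls between the bounds derived from dist and sumDist. A minimal sketch of just that step, using a hypothetical helper name (clampNdvEstimate) that does not exist in the Hive source:

static long clampNdvEstimate(long estimation, long lowerBound, long higherBound) {
    // Trust the density-based estimate only inside [lowerBound, higherBound];
    // otherwise fall back to the nearer bound, exactly as each branch above does.
    if (estimation < lowerBound) {
        return lowerBound;
    }
    if (estimation > higherBound) {
        return higherBound;
    }
    return estimation;
}

With such a helper each branch would reduce to a single call like longStats.setNumDVs(clampNdvEstimate(estimation, lowerBound, higherBound)).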

Example 12 with Decimal

use of org.apache.hadoop.hive.metastore.api.Decimal in project hive by apache.

the class TestHBaseStoreIntegration method tableStatistics.

@Test
public void tableStatistics() throws Exception {
    long now = System.currentTimeMillis();
    String dbname = "default";
    String tableName = "statstable";
    String boolcol = "boolcol";
    String longcol = "longcol";
    String doublecol = "doublecol";
    String stringcol = "stringcol";
    String binarycol = "bincol";
    String decimalcol = "deccol";
    long trues = 37;
    long falses = 12;
    long booleanNulls = 2;
    long longHigh = 120938479124L;
    long longLow = -12341243213412124L;
    long longNulls = 23;
    long longDVs = 213L;
    double doubleHigh = 123423.23423;
    double doubleLow = 0.00001234233;
    long doubleNulls = 92;
    long doubleDVs = 1234123421L;
    long strMaxLen = 1234;
    double strAvgLen = 32.3;
    long strNulls = 987;
    long strDVs = 906;
    long binMaxLen = 123412987L;
    double binAvgLen = 76.98;
    long binNulls = 976998797L;
    Decimal decHigh = new Decimal();
    decHigh.setScale((short) 3);
    // I have no clue how this is translated, but it doesn't matter
    decHigh.setUnscaled("3876".getBytes());
    Decimal decLow = new Decimal();
    decLow.setScale((short) 3);
    decLow.setUnscaled("38".getBytes());
    long decNulls = 13;
    long decDVs = 923947293L;
    List<FieldSchema> cols = new ArrayList<FieldSchema>();
    cols.add(new FieldSchema(boolcol, "boolean", "nocomment"));
    cols.add(new FieldSchema(longcol, "long", "nocomment"));
    cols.add(new FieldSchema(doublecol, "double", "nocomment"));
    cols.add(new FieldSchema(stringcol, "varchar(32)", "nocomment"));
    cols.add(new FieldSchema(binarycol, "binary", "nocomment"));
    cols.add(new FieldSchema(decimalcol, "decimal(5, 3)", "nocomment"));
    SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
    StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output", false, 0, serde, null, null, emptyParameters);
    Table table = new Table(tableName, dbname, "me", (int) (now / 1000), (int) (now / 1000), 0, sd, null, emptyParameters, null, null, null);
    store.createTable(table);
    ColumnStatistics stats = new ColumnStatistics();
    ColumnStatisticsDesc desc = new ColumnStatisticsDesc();
    desc.setLastAnalyzed(now);
    desc.setDbName(dbname);
    desc.setTableName(tableName);
    desc.setIsTblLevel(true);
    stats.setStatsDesc(desc);
    // Do one column of each type
    ColumnStatisticsObj obj = new ColumnStatisticsObj();
    obj.setColName(boolcol);
    obj.setColType("boolean");
    ColumnStatisticsData data = new ColumnStatisticsData();
    BooleanColumnStatsData boolData = new BooleanColumnStatsData();
    boolData.setNumTrues(trues);
    boolData.setNumFalses(falses);
    boolData.setNumNulls(booleanNulls);
    data.setBooleanStats(boolData);
    obj.setStatsData(data);
    stats.addToStatsObj(obj);
    obj = new ColumnStatisticsObj();
    obj.setColName(longcol);
    obj.setColType("long");
    data = new ColumnStatisticsData();
    LongColumnStatsData longData = new LongColumnStatsData();
    longData.setHighValue(longHigh);
    longData.setLowValue(longLow);
    longData.setNumNulls(longNulls);
    longData.setNumDVs(longDVs);
    data.setLongStats(longData);
    obj.setStatsData(data);
    stats.addToStatsObj(obj);
    obj = new ColumnStatisticsObj();
    obj.setColName(doublecol);
    obj.setColType("double");
    data = new ColumnStatisticsData();
    DoubleColumnStatsData doubleData = new DoubleColumnStatsData();
    doubleData.setHighValue(doubleHigh);
    doubleData.setLowValue(doubleLow);
    doubleData.setNumNulls(doubleNulls);
    doubleData.setNumDVs(doubleDVs);
    data.setDoubleStats(doubleData);
    obj.setStatsData(data);
    stats.addToStatsObj(obj);
    store.updateTableColumnStatistics(stats);
    stats = store.getTableColumnStatistics(dbname, tableName, Arrays.asList(boolcol, longcol, doublecol));
    // We'll check all of the individual values later.
    Assert.assertEquals(3, stats.getStatsObjSize());
    // check that we can fetch just some of the columns
    stats = store.getTableColumnStatistics(dbname, tableName, Arrays.asList(boolcol));
    Assert.assertEquals(1, stats.getStatsObjSize());
    stats = new ColumnStatistics();
    stats.setStatsDesc(desc);
    obj = new ColumnStatisticsObj();
    obj.setColName(stringcol);
    obj.setColType("string");
    data = new ColumnStatisticsData();
    StringColumnStatsData strData = new StringColumnStatsData();
    strData.setMaxColLen(strMaxLen);
    strData.setAvgColLen(strAvgLen);
    strData.setNumNulls(strNulls);
    strData.setNumDVs(strDVs);
    data.setStringStats(strData);
    obj.setStatsData(data);
    stats.addToStatsObj(obj);
    obj = new ColumnStatisticsObj();
    obj.setColName(binarycol);
    obj.setColType("binary");
    data = new ColumnStatisticsData();
    BinaryColumnStatsData binData = new BinaryColumnStatsData();
    binData.setMaxColLen(binMaxLen);
    binData.setAvgColLen(binAvgLen);
    binData.setNumNulls(binNulls);
    data.setBinaryStats(binData);
    obj.setStatsData(data);
    stats.addToStatsObj(obj);
    obj = new ColumnStatisticsObj();
    obj.setColName(decimalcol);
    obj.setColType("decimal(5,3)");
    data = new ColumnStatisticsData();
    DecimalColumnStatsData decData = new DecimalColumnStatsData();
    LOG.debug("Setting decimal high value to " + decHigh.getScale() + " <" + new String(decHigh.getUnscaled()) + ">");
    decData.setHighValue(decHigh);
    decData.setLowValue(decLow);
    decData.setNumNulls(decNulls);
    decData.setNumDVs(decDVs);
    data.setDecimalStats(decData);
    obj.setStatsData(data);
    stats.addToStatsObj(obj);
    store.updateTableColumnStatistics(stats);
    stats = store.getTableColumnStatistics(dbname, tableName, Arrays.asList(boolcol, longcol, doublecol, stringcol, binarycol, decimalcol));
    Assert.assertEquals(now, stats.getStatsDesc().getLastAnalyzed());
    Assert.assertEquals(dbname, stats.getStatsDesc().getDbName());
    Assert.assertEquals(tableName, stats.getStatsDesc().getTableName());
    Assert.assertTrue(stats.getStatsDesc().isIsTblLevel());
    Assert.assertEquals(6, stats.getStatsObjSize());
    ColumnStatisticsData colData = stats.getStatsObj().get(0).getStatsData();
    Assert.assertEquals(ColumnStatisticsData._Fields.BOOLEAN_STATS, colData.getSetField());
    boolData = colData.getBooleanStats();
    Assert.assertEquals(trues, boolData.getNumTrues());
    Assert.assertEquals(falses, boolData.getNumFalses());
    Assert.assertEquals(booleanNulls, boolData.getNumNulls());
    colData = stats.getStatsObj().get(1).getStatsData();
    Assert.assertEquals(ColumnStatisticsData._Fields.LONG_STATS, colData.getSetField());
    longData = colData.getLongStats();
    Assert.assertEquals(longHigh, longData.getHighValue());
    Assert.assertEquals(longLow, longData.getLowValue());
    Assert.assertEquals(longNulls, longData.getNumNulls());
    Assert.assertEquals(longDVs, longData.getNumDVs());
    colData = stats.getStatsObj().get(2).getStatsData();
    Assert.assertEquals(ColumnStatisticsData._Fields.DOUBLE_STATS, colData.getSetField());
    doubleData = colData.getDoubleStats();
    Assert.assertEquals(doubleHigh, doubleData.getHighValue(), 0.01);
    Assert.assertEquals(doubleLow, doubleData.getLowValue(), 0.01);
    Assert.assertEquals(doubleNulls, doubleData.getNumNulls());
    Assert.assertEquals(doubleDVs, doubleData.getNumDVs());
    colData = stats.getStatsObj().get(3).getStatsData();
    Assert.assertEquals(ColumnStatisticsData._Fields.STRING_STATS, colData.getSetField());
    strData = colData.getStringStats();
    Assert.assertEquals(strMaxLen, strData.getMaxColLen());
    Assert.assertEquals(strAvgLen, strData.getAvgColLen(), 0.01);
    Assert.assertEquals(strNulls, strData.getNumNulls());
    Assert.assertEquals(strDVs, strData.getNumDVs());
    colData = stats.getStatsObj().get(4).getStatsData();
    Assert.assertEquals(ColumnStatisticsData._Fields.BINARY_STATS, colData.getSetField());
    binData = colData.getBinaryStats();
    Assert.assertEquals(binMaxLen, binData.getMaxColLen());
    Assert.assertEquals(binAvgLen, binData.getAvgColLen(), 0.01);
    Assert.assertEquals(binNulls, binData.getNumNulls());
    colData = stats.getStatsObj().get(5).getStatsData();
    Assert.assertEquals(ColumnStatisticsData._Fields.DECIMAL_STATS, colData.getSetField());
    decData = colData.getDecimalStats();
    Assert.assertEquals(decHigh, decData.getHighValue());
    Assert.assertEquals(decLow, decData.getLowValue());
    Assert.assertEquals(decNulls, decData.getNumNulls());
    Assert.assertEquals(decDVs, decData.getNumDVs());
}
Also used : ColumnStatistics(org.apache.hadoop.hive.metastore.api.ColumnStatistics) BooleanColumnStatsData(org.apache.hadoop.hive.metastore.api.BooleanColumnStatsData) Table(org.apache.hadoop.hive.metastore.api.Table) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) SerDeInfo(org.apache.hadoop.hive.metastore.api.SerDeInfo) ArrayList(java.util.ArrayList) StorageDescriptor(org.apache.hadoop.hive.metastore.api.StorageDescriptor) StringColumnStatsData(org.apache.hadoop.hive.metastore.api.StringColumnStatsData) LongColumnStatsData(org.apache.hadoop.hive.metastore.api.LongColumnStatsData) BinaryColumnStatsData(org.apache.hadoop.hive.metastore.api.BinaryColumnStatsData) ColumnStatisticsObj(org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj) DoubleColumnStatsData(org.apache.hadoop.hive.metastore.api.DoubleColumnStatsData) DecimalColumnStatsData(org.apache.hadoop.hive.metastore.api.DecimalColumnStatsData) Decimal(org.apache.hadoop.hive.metastore.api.Decimal) ColumnStatisticsDesc(org.apache.hadoop.hive.metastore.api.ColumnStatisticsDesc) ColumnStatisticsData(org.apache.hadoop.hive.metastore.api.ColumnStatisticsData) Test(org.junit.Test)
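The test's decimal bounds deliberately use placeholder bytes ("3876".getBytes() yields ASCII characters, not a numeric encoding), since only round-tripping through the store is being checked. For a Decimal that actually encodes a number, the unscaled field holds the two's-complement bytes of BigDecimal.unscaledValue(), as Example 11 shows. A minimal sketch with a hypothetical helper name (toDecimal):

static Decimal toDecimal(BigDecimal bd) {
    // e.g. new BigDecimal("3.876") -> scale 3, unscaled value 3876
    Decimal d = new Decimal();
    d.setScale((short) bd.scale());
    d.setUnscaled(bd.unscaledValue().toByteArray());
    return d;
}

Reading the value back is the reverse, new BigDecimal(new BigInteger(d.getUnscaled()), d.getScale()), which is the same pattern Example 14 below uses for the Glue DecimalNumber.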

Example 13 with Decimal

use of org.apache.hadoop.hive.metastore.api.Decimal in project trino by trinodb.

the class GlueStatConverter method bigDecimalToGlueDecimal.

private static DecimalNumber bigDecimalToGlueDecimal(BigDecimal decimal) {
    Decimal hiveDecimal = new Decimal((short) decimal.scale(), ByteBuffer.wrap(decimal.unscaledValue().toByteArray()));
    DecimalNumber catalogDecimal = new DecimalNumber();
    catalogDecimal.setUnscaledValue(ByteBuffer.wrap(hiveDecimal.getUnscaled()));
    catalogDecimal.setScale((int) hiveDecimal.getScale());
    return catalogDecimal;
}
Also used : DecimalNumber(com.amazonaws.services.glue.model.DecimalNumber) BigDecimal(java.math.BigDecimal) Decimal(org.apache.hadoop.hive.metastore.api.Decimal)

Example 14 with Decimal

use of org.apache.hadoop.hive.metastore.api.Decimal in project trino by trinodb.

the class GlueStatConverter method glueDecimalToBigDecimal.

private static Optional<BigDecimal> glueDecimalToBigDecimal(DecimalNumber catalogDecimal) {
    if (catalogDecimal == null) {
        return Optional.empty();
    }
    Decimal decimal = new Decimal();
    decimal.setUnscaled(catalogDecimal.getUnscaledValue());
    decimal.setScale(catalogDecimal.getScale().shortValue());
    return Optional.of(new BigDecimal(new BigInteger(decimal.getUnscaled()), decimal.getScale()));
}
Also used : BigDecimal(java.math.BigDecimal) Decimal(org.apache.hadoop.hive.metastore.api.Decimal) BigInteger(java.math.BigInteger)
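Taken together, the two Glue converters round-trip exactly, because both the scale and the two's-complement unscaled bytes survive the trip. A rough sketch of that round trip, inlining the logic since the helpers above are private to GlueStatConverter (imports as in the "Also used" lists):

BigDecimal original = new BigDecimal("123.456");

DecimalNumber glueValue = new DecimalNumber();
glueValue.setUnscaledValue(ByteBuffer.wrap(original.unscaledValue().toByteArray()));
glueValue.setScale(original.scale());

// The wrapped ByteBuffer is array-backed, so array() returns the same bytes.
BigDecimal restored = new BigDecimal(
        new BigInteger(glueValue.getUnscaledValue().array()), glueValue.getScale());
// restored.equals(original) is true: scale 3 and unscaled value 123456 are preserved.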

Example 15 with Decimal

use of org.apache.hadoop.hive.metastore.api.Decimal in project flink-mirror by flink-ci.

the class HiveStatsUtil method toThriftDecimal.

private static Decimal toThriftDecimal(HiveDecimal hiveDecimal) {
    // the constructor signature changed in 3.x. use default constructor and set each field...
    Decimal res = new Decimal();
    res.setUnscaled(ByteBuffer.wrap(hiveDecimal.unscaledValue().toByteArray()));
    res.setScale((short) hiveDecimal.scale());
    return res;
}
Also used : BigDecimal(java.math.BigDecimal) Decimal(org.apache.hadoop.hive.metastore.api.Decimal) HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal)
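A short usage sketch under the same imports (illustrative only, since toThriftDecimal is private to HiveStatsUtil):

HiveDecimal hiveDecimal = HiveDecimal.create("123.456");
Decimal thriftDecimal = toThriftDecimal(hiveDecimal);
// thriftDecimal now carries scale 3 and the two's-complement bytes of 123456.

Using the default constructor plus setters sidesteps the Thrift constructor whose argument order differs between Hive 2.x and 3.x generated classes, which is visible above: Example 11 passes (ByteBuffer, short) while Example 13 passes (short, ByteBuffer).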

Aggregations

Decimal (org.apache.hadoop.hive.metastore.api.Decimal) 18
BigDecimal (java.math.BigDecimal) 11
DecimalColumnStatsData (org.apache.hadoop.hive.metastore.api.DecimalColumnStatsData) 7
ColumnStatisticsData (org.apache.hadoop.hive.metastore.api.ColumnStatisticsData) 6
ColumnStatisticsObj (org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj) 5
HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal) 4
BigInteger (java.math.BigInteger) 3
BinaryColumnStatsData (org.apache.hadoop.hive.metastore.api.BinaryColumnStatsData) 3
BooleanColumnStatsData (org.apache.hadoop.hive.metastore.api.BooleanColumnStatsData) 3
DoubleColumnStatsData (org.apache.hadoop.hive.metastore.api.DoubleColumnStatsData) 3
LongColumnStatsData (org.apache.hadoop.hive.metastore.api.LongColumnStatsData) 3
StringColumnStatsData (org.apache.hadoop.hive.metastore.api.StringColumnStatsData) 3
DecimalNumber (com.amazonaws.services.glue.model.DecimalNumber) 2
ByteBuffer (java.nio.ByteBuffer) 2
BinaryColumnStatisticsData (com.amazonaws.services.glue.model.BinaryColumnStatisticsData) 1
BooleanColumnStatisticsData (com.amazonaws.services.glue.model.BooleanColumnStatisticsData) 1
ColumnStatistics (com.amazonaws.services.glue.model.ColumnStatistics) 1
ColumnStatisticsData (com.amazonaws.services.glue.model.ColumnStatisticsData) 1
ColumnStatisticsType (com.amazonaws.services.glue.model.ColumnStatisticsType) 1
DateColumnStatisticsData (com.amazonaws.services.glue.model.DateColumnStatisticsData) 1