Example 66 with PartitionSpecProxy

Use of org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy in project hive by apache.

From class TestListPartitions, method testListPartitionSpecsByFilterNullFilter.

@Test
public void testListPartitionSpecsByFilterNullFilter() throws Exception {
    List<List<String>> values = createTable4PartColsParts(client).testValues;
    // A null filter matches every partition of the table.
    PartitionSpecProxy pproxy = client.listPartitionSpecsByFilter(DB_NAME, TABLE_NAME, null, -1);
    assertPartitionsSpecProxy(pproxy, values);
}
Also used: ArrayList (java.util.ArrayList), List (java.util.List), PartitionSpecProxy (org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy), Test (org.junit.Test), MetastoreCheckinTest (org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest)
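
For orientation, here is a minimal consumption sketch, assuming an already-connected IMetaStoreClient named client; the names "db" and "tbl" are placeholders, not taken from the test above. It walks the proxy returned by a null filter, which matches every partition:

// Assumes: org.apache.hadoop.hive.metastore.IMetaStoreClient `client`, plus the
// Partition and PartitionSpecProxy imports listed above.
PartitionSpecProxy proxy = client.listPartitionSpecsByFilter("db", "tbl", null, -1);
PartitionSpecProxy.PartitionIterator it = proxy.getPartitionIterator();
while (it.hasNext()) {
    Partition p = it.next();
    System.out.println(p.getValues()); // partition key values, in partition-column order
}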

Example 67 with PartitionSpecProxy

Use of org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy in project hive by apache.

From class TestListPartitions, method testListPartitionsSpecsByFilter.

/**
 * Testing listPartitionSpecsByFilter(String,String,String,int) ->
 *         get_part_specs_by_filter(String,String,String,int).
 */
@Test
public void testListPartitionsSpecsByFilter() throws Exception {
    List<List<String>> testValues = createTable4PartColsParts(client).testValues;
    // maxParts = -1: return every matching partition.
    PartitionSpecProxy partSpecProxy = client.listPartitionSpecsByFilter(DB_NAME, TABLE_NAME, "yyyy=\"2017\" OR mm=\"02\"", -1);
    assertPartitionsSpecProxy(partSpecProxy, testValues.subList(1, 4));
    // maxParts = 2: cap the result at two partitions.
    partSpecProxy = client.listPartitionSpecsByFilter(DB_NAME, TABLE_NAME, "yyyy=\"2017\" OR mm=\"02\"", 2);
    assertPartitionsSpecProxy(partSpecProxy, testValues.subList(1, 3));
    // maxParts = 0: an empty result is expected.
    partSpecProxy = client.listPartitionSpecsByFilter(DB_NAME, TABLE_NAME, "yyyy=\"2017\" OR mm=\"02\"", 0);
    assertPartitionsSpecProxy(partSpecProxy, Lists.newArrayList());
    // A filter matching no partition values yields an empty result.
    partSpecProxy = client.listPartitionSpecsByFilter(DB_NAME, TABLE_NAME, "yyyy=\"20177\"", -1);
    assertPartitionsSpecProxy(partSpecProxy, Lists.newArrayList());
    // HIVE-18977: this mixed-case filter is exercised only when direct SQL is enabled.
    if (MetastoreConf.getBoolVar(metaStore.getConf(), MetastoreConf.ConfVars.TRY_DIRECT_SQL)) {
        partSpecProxy = client.listPartitionSpecsByFilter(DB_NAME, TABLE_NAME, "yYyY=\"2017\"", -1);
        assertPartitionsSpecProxy(partSpecProxy, testValues.subList(2, 4));
    }
    partSpecProxy = client.listPartitionSpecsByFilter(DB_NAME, TABLE_NAME, "yyyy=\"2017\" AND mm=\"99\"", -1);
    assertPartitionsSpecProxy(partSpecProxy, Lists.newArrayList());
}
Also used: ArrayList (java.util.ArrayList), List (java.util.List), PartitionSpecProxy (org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy), Test (org.junit.Test), MetastoreCheckinTest (org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest)
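
The maxParts argument exercised above behaves consistently: -1 returns all matches, 0 returns none, and a positive value caps the result. A hedged sketch under the same assumptions (connected client, placeholder names "db"/"tbl", partition columns yyyy and mm):

// Filter strings combine partition-column comparisons with AND/OR.
String filter = "yyyy=\"2017\" AND mm=\"02\"";
PartitionSpecProxy matched = client.listPartitionSpecsByFilter("db", "tbl", filter, -1);
System.out.println("matched partitions: " + matched.size());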

Example 68 with PartitionSpecProxy

Use of org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy in project metacat by Netflix.

From class CatalogThriftHiveMetastore, method add_partitions_pspec.

/**
 * {@inheritDoc}
 */
@Override
public int add_partitions_pspec(final List<PartitionSpec> newParts) throws TException {
    if (newParts == null || newParts.isEmpty()) {
        return 0;
    }
    // All specs in the request are assumed to target the table of the first entry.
    final String dbName = newParts.get(0).getDbName();
    final String tableName = newParts.get(0).getTableName();
    return requestWrapper("add_partition", new Object[] { dbName, tableName }, () -> {
        // Flatten the (possibly shared-SD) partition specs into plain Partition objects.
        final PartitionSpecProxy partitionSpecProxy = PartitionSpecProxy.Factory.get(newParts);
        final PartitionSpecProxy.PartitionIterator partitionIterator = partitionSpecProxy.getPartitionIterator();
        final List<Partition> partitions = addPartitionsCore(dbName, tableName, Lists.newArrayList(partitionIterator), false);
        return partitions.size();
    });
}
Also used: Partition (org.apache.hadoop.hive.metastore.api.Partition), PartitionSpecProxy (org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy)
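
For the caller side of this endpoint, a hedged assembly sketch follows; it assumes the Thrift-generated metastore API classes (PartitionSpec, PartitionListComposingSpec) and a service stub named metastore, all placeholders here. A real partition also needs its storage descriptor set, which is omitted for brevity:

// Hypothetical caller-side request assembly ("db"/"tbl" are placeholder names).
Partition p = new Partition();
p.setDbName("db");
p.setTableName("tbl");
p.setValues(Lists.newArrayList("2017", "02"));
// p.setSd(...) is required in practice; omitted here.
PartitionSpec spec = new PartitionSpec();
spec.setDbName("db");
spec.setTableName("tbl");
spec.setPartitionList(new PartitionListComposingSpec(Lists.newArrayList(p)));
int added = metastore.add_partitions_pspec(Lists.newArrayList(spec)); // `metastore`: the Thrift service stub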

Example 69 with PartitionSpecProxy

Use of org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy in project flink by apache.

From class HiveCatalog, method listPartitionsByFilter.

@Override
public List<CatalogPartitionSpec> listPartitionsByFilter(ObjectPath tablePath, List<Expression> expressions) throws TableNotExistException, TableNotPartitionedException, CatalogException {
    Table hiveTable = getHiveTable(tablePath);
    ensurePartitionedTable(tablePath, hiveTable);
    List<String> partColNames = getFieldNames(hiveTable.getPartitionKeys());
    // Translate the Flink expressions into a Hive metastore filter string; not every
    // expression is translatable, so the result is optional.
    Optional<String> filter = HiveTableUtil.makePartitionFilter(HiveTableUtil.getNonPartitionFields(hiveConf, hiveTable, hiveShim).size(), partColNames, expressions, hiveShim);
    if (!filter.isPresent()) {
        throw new UnsupportedOperationException("HiveCatalog is unable to handle the partition filter expressions: " + expressions);
    }
    try {
        PartitionSpecProxy partitionSpec = client.listPartitionSpecsByFilter(tablePath.getDatabaseName(), tablePath.getObjectName(), filter.get(), (short) -1);
        List<CatalogPartitionSpec> res = new ArrayList<>(partitionSpec.size());
        PartitionSpecProxy.PartitionIterator partitions = partitionSpec.getPartitionIterator();
        while (partitions.hasNext()) {
            Partition partition = partitions.next();
            // Re-associate the positional partition values with their column names.
            Map<String, String> spec = new HashMap<>();
            for (int i = 0; i < partColNames.size(); i++) {
                spec.put(partColNames.get(i), partition.getValues().get(i));
            }
            res.add(new CatalogPartitionSpec(spec));
        }
        return res;
    } catch (TException e) {
        throw new UnsupportedOperationException("Failed to list partition by filter from HMS, filter expressions: " + expressions, e);
    }
}
Also used: TException (org.apache.thrift.TException), Partition (org.apache.hadoop.hive.metastore.api.Partition), CatalogPartition (org.apache.flink.table.catalog.CatalogPartition), CatalogTable (org.apache.flink.table.catalog.CatalogTable), SqlCreateHiveTable (org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveTable), Table (org.apache.hadoop.hive.metastore.api.Table), CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable), HashMap (java.util.HashMap), ArrayList (java.util.ArrayList), UniqueConstraint (org.apache.flink.table.api.constraints.UniqueConstraint), PartitionSpecProxy (org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy), CatalogPartitionSpec (org.apache.flink.table.catalog.CatalogPartitionSpec)
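
A hedged usage sketch for this catalog method: catalog is assumed to be an initialized org.apache.flink.table.catalog.hive.HiveCatalog, "db"/"tbl" are placeholder names, and building the Expression list is left to Flink's expression API:

// Prints the key -> value map of every partition matching the filter expressions.
static void printMatchingPartitions(HiveCatalog catalog, List<Expression> exprs) throws Exception {
    ObjectPath path = new ObjectPath("db", "tbl"); // placeholder names
    for (CatalogPartitionSpec spec : catalog.listPartitionsByFilter(path, exprs)) {
        System.out.println(spec.getPartitionSpec()); // Map<String, String>: partition key -> value
    }
}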

Aggregations

PartitionSpecProxy (org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy): 69
Test (org.junit.Test): 60
MetastoreCheckinTest (org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest): 59
Partition (org.apache.hadoop.hive.metastore.api.Partition): 53
Table (org.apache.hadoop.hive.metastore.api.Table): 24
PartitionWithoutSD (org.apache.hadoop.hive.metastore.api.PartitionWithoutSD): 15
ArrayList (java.util.ArrayList): 13
Path (org.apache.hadoop.fs.Path): 11
List (java.util.List): 7
MetaException (org.apache.hadoop.hive.metastore.api.MetaException): 7
HashMap (java.util.HashMap): 4
PartitionSpec (org.apache.hadoop.hive.metastore.api.PartitionSpec): 4
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 3
StorageDescriptor (org.apache.hadoop.hive.metastore.api.StorageDescriptor): 3
PartitionBuilder (org.apache.hadoop.hive.metastore.client.builder.PartitionBuilder): 3
CompositePartitionSpecProxy (org.apache.hadoop.hive.metastore.partition.spec.CompositePartitionSpecProxy): 3
PartitionSpecWithSharedSD (org.apache.hadoop.hive.metastore.api.PartitionSpecWithSharedSD): 2
SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint): 2
SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint): 2
SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint): 2