Search in sources:

Example 26 with Table

use of org.apache.hadoop.hive.metastore.api.Table in project hive by apache.

In class TestMutations, method testTransactionBatchEmptyCommitPartitioned.

@Test
public void testTransactionBatchEmptyCommitPartitioned() throws Exception {
    // Register a partitioned table with one pre-existing partition as the sink.
    Table sinkTable = partitionedTableBuilder.addPartition(ASIA_INDIA).create(metaStoreClient);
    MutatorClient mutatorClient = new MutatorClientBuilder()
            .addSinkTable(sinkTable.getDbName(), sinkTable.getTableName(), true)
            .metaStoreUri(metaStoreUri)
            .build();
    mutatorClient.connect();
    // A transaction with no mutations should still commit cleanly.
    Transaction txn = mutatorClient.newTransaction();
    txn.begin();
    txn.commit();
    assertThat(txn.getState(), is(COMMITTED));
    mutatorClient.close();
}
Also used : AcidTable(org.apache.hive.hcatalog.streaming.mutate.client.AcidTable) Table(org.apache.hadoop.hive.metastore.api.Table) Transaction(org.apache.hive.hcatalog.streaming.mutate.client.Transaction) MutatorClient(org.apache.hive.hcatalog.streaming.mutate.client.MutatorClient) MutatorClientBuilder(org.apache.hive.hcatalog.streaming.mutate.client.MutatorClientBuilder) Test(org.junit.Test)

Example 27 with Table

use of org.apache.hadoop.hive.metastore.api.Table in project hive by apache.

In class TestMutations, method testTransactionBatchEmptyAbortPartitioned.

@Test
public void testTransactionBatchEmptyAbortPartitioned() throws Exception {
    // Register a partitioned table with one pre-existing partition as the sink.
    Table sinkTable = partitionedTableBuilder.addPartition(ASIA_INDIA).create(metaStoreClient);
    MutatorClient mutatorClient = new MutatorClientBuilder()
            .addSinkTable(sinkTable.getDbName(), sinkTable.getTableName(), true)
            .metaStoreUri(metaStoreUri)
            .build();
    mutatorClient.connect();
    Transaction txn = mutatorClient.newTransaction();
    List<AcidTable> sinks = mutatorClient.getTables();
    txn.begin();
    // Stand up a coordinator for the sink but write nothing before closing it.
    MutatorFactory factory = new ReflectiveMutatorFactory(conf, MutableRecord.class, RECORD_ID_COLUMN, BUCKET_COLUMN_INDEXES);
    MutatorCoordinator coordinator = new MutatorCoordinatorBuilder()
            .metaStoreUri(metaStoreUri)
            .table(sinks.get(0))
            .mutatorFactory(factory)
            .build();
    coordinator.close();
    // Aborting the empty transaction should leave it in the ABORTED state.
    txn.abort();
    assertThat(txn.getState(), is(ABORTED));
    mutatorClient.close();
}
Also used : AcidTable(org.apache.hive.hcatalog.streaming.mutate.client.AcidTable) Table(org.apache.hadoop.hive.metastore.api.Table) MutatorFactory(org.apache.hive.hcatalog.streaming.mutate.worker.MutatorFactory) Transaction(org.apache.hive.hcatalog.streaming.mutate.client.Transaction) AcidTable(org.apache.hive.hcatalog.streaming.mutate.client.AcidTable) MutatorCoordinatorBuilder(org.apache.hive.hcatalog.streaming.mutate.worker.MutatorCoordinatorBuilder) MutatorCoordinator(org.apache.hive.hcatalog.streaming.mutate.worker.MutatorCoordinator) MutatorClient(org.apache.hive.hcatalog.streaming.mutate.client.MutatorClient) MutatorClientBuilder(org.apache.hive.hcatalog.streaming.mutate.client.MutatorClientBuilder) Test(org.junit.Test)

Example 28 with Table

use of org.apache.hadoop.hive.metastore.api.Table in project hive by apache.

In class TestAcidTableSerializer, method testSerializeDeserialize.

@Test
public void testSerializeDeserialize() throws Exception {
    // Build a fully-populated table (two columns, one partition key, 10 buckets)
    // so that every field of AcidTable is exercised by the round trip.
    Database database = StreamingTestUtils.databaseBuilder(new File("/tmp")).name("db_1").build();
    Table table = StreamingTestUtils.tableBuilder(database)
            .name("table_1")
            .addColumn("one", "string")
            .addColumn("two", "integer")
            .partitionKeys("partition")
            .addPartition("p1")
            .buckets(10)
            .build();
    AcidTable acidTable = new AcidTable("db_1", "table_1", true, TableType.SINK);
    acidTable.setTable(table);
    acidTable.setTransactionId(42L);
    // Encode then decode; the decoded instance must preserve every field.
    // (Removed a stray System.out.println debug leftover that polluted test output.)
    String encoded = AcidTableSerializer.encode(acidTable);
    AcidTable decoded = AcidTableSerializer.decode(encoded);
    assertThat(decoded.getDatabaseName(), is("db_1"));
    assertThat(decoded.getTableName(), is("table_1"));
    assertThat(decoded.createPartitions(), is(true));
    assertThat(decoded.getOutputFormatName(), is("org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat"));
    assertThat(decoded.getTotalBuckets(), is(10));
    assertThat(decoded.getQualifiedName(), is("DB_1.TABLE_1"));
    assertThat(decoded.getTransactionId(), is(42L));
    assertThat(decoded.getTableType(), is(TableType.SINK));
    assertThat(decoded.getTable(), is(table));
}
Also used : Table(org.apache.hadoop.hive.metastore.api.Table) Database(org.apache.hadoop.hive.metastore.api.Database) File(java.io.File) Test(org.junit.Test)

Example 29 with Table

use of org.apache.hadoop.hive.metastore.api.Table in project hive by apache.

In class HCatClientHMSImpl, method dropPartitions.

/**
 * Drops the partitions of {@code dbName.tableName} that match {@code partitionSpec}.
 * Thrift-level metastore failures are remapped to HCatalog exception types so
 * callers only deal with {@link HCatException} subclasses.
 *
 * @param dbName database name; passed through {@code checkDB} first
 *               (presumably normalising a null/empty name to the default database — TODO confirm)
 * @param tableName table whose partitions are dropped
 * @param partitionSpec partition-key column to value map selecting the partitions to drop
 * @param ifExists forwarded to the drop implementations
 * @param deleteData forwarded to the drop implementations
 * @throws ObjectNotFoundException if the database or table does not exist
 * @throws HCatException on a metastore {@code MetaException}
 * @throws ConnectionFailureException on any other Thrift transport failure
 */
@Override
public void dropPartitions(String dbName, String tableName, Map<String, String> partitionSpec, boolean ifExists, boolean deleteData) throws HCatException {
    LOG.info("HCatClient dropPartitions(db=" + dbName + ",table=" + tableName + ", partitionSpec: [" + partitionSpec + "]).");
    try {
        dbName = checkDB(dbName);
        Table table = hmsClient.getTable(dbName, tableName);
        if (hiveConfig.getBoolVar(HiveConf.ConfVars.METASTORE_CLIENT_DROP_PARTITIONS_WITH_EXPRESSIONS)) {
            try {
                // Fast path: push the partition spec down to the metastore as an expression.
                dropPartitionsUsingExpressions(table, partitionSpec, ifExists, deleteData);
            } catch (SemanticException parseFailure) {
                // The spec could not be compiled into a push-down expression;
                // fall back to dropping matching partitions one at a time.
                LOG.warn("Could not push down partition-specification to back-end, for dropPartitions(). Resorting to iteration.", parseFailure);
                dropPartitionsIteratively(dbName, tableName, partitionSpec, ifExists, deleteData);
            }
        } else {
            // Not using expressions.
            dropPartitionsIteratively(dbName, tableName, partitionSpec, ifExists, deleteData);
        }
    } catch (NoSuchObjectException e) {
        // The metastore does not say whether the db or the table was missing, so report both.
        throw new ObjectNotFoundException("NoSuchObjectException while dropping partition. " + "Either db(" + dbName + ") or table(" + tableName + ") missing.", e);
    } catch (MetaException e) {
        throw new HCatException("MetaException while dropping partition.", e);
    } catch (TException e) {
        throw new ConnectionFailureException("TException while dropping partition.", e);
    }
}
Also used : TException(org.apache.thrift.TException) Table(org.apache.hadoop.hive.metastore.api.Table) HCatException(org.apache.hive.hcatalog.common.HCatException) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) MetaException(org.apache.hadoop.hive.metastore.api.MetaException)

Example 30 with Table

use of org.apache.hadoop.hive.metastore.api.Table in project hive by apache.

In class TestHeartbeatTimerTask, method createTable.

/** Builds a single-element list containing a minimal table ("DB"."TABLE") fixture. */
private static List<Table> createTable() {
    Table fixture = new Table();
    fixture.setDbName("DB");
    fixture.setTableName("TABLE");
    return Arrays.asList(fixture);
}
Also used : Table(org.apache.hadoop.hive.metastore.api.Table)

Aggregations

Table (org.apache.hadoop.hive.metastore.api.Table)355 Test (org.junit.Test)198 ArrayList (java.util.ArrayList)169 StorageDescriptor (org.apache.hadoop.hive.metastore.api.StorageDescriptor)143 FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema)139 Partition (org.apache.hadoop.hive.metastore.api.Partition)126 SerDeInfo (org.apache.hadoop.hive.metastore.api.SerDeInfo)123 MetaException (org.apache.hadoop.hive.metastore.api.MetaException)56 HashMap (java.util.HashMap)55 Path (org.apache.hadoop.fs.Path)53 Database (org.apache.hadoop.hive.metastore.api.Database)53 NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException)48 ShowCompactRequest (org.apache.hadoop.hive.metastore.api.ShowCompactRequest)48 ShowCompactResponse (org.apache.hadoop.hive.metastore.api.ShowCompactResponse)48 TException (org.apache.thrift.TException)41 CompactionRequest (org.apache.hadoop.hive.metastore.api.CompactionRequest)40 List (java.util.List)38 ShowCompactResponseElement (org.apache.hadoop.hive.metastore.api.ShowCompactResponseElement)35 FileSystem (org.apache.hadoop.fs.FileSystem)31 ColumnStatisticsDesc (org.apache.hadoop.hive.metastore.api.ColumnStatisticsDesc)31