
Example 51 with Database

Use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.

From the class DbNotificationListener, method onCreateDatabase.

/**
 * @param dbEvent database event
 * @throws MetaException
 */
@Override
public void onCreateDatabase(CreateDatabaseEvent dbEvent) throws MetaException {
    Database db = dbEvent.getDatabase();
    NotificationEvent event = new NotificationEvent(0, now(),
            EventType.CREATE_DATABASE.toString(),
            msgFactory.buildCreateDatabaseMessage(db).toString());
    event.setDbName(db.getName());
    process(event, dbEvent);
}
Also used : Database(org.apache.hadoop.hive.metastore.api.Database) NotificationEvent(org.apache.hadoop.hive.metastore.api.NotificationEvent)

Example 52 with Database

Use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.

From the class DbNotificationListener, method onDropDatabase.

/**
 * @param dbEvent database event
 * @throws MetaException
 */
@Override
public void onDropDatabase(DropDatabaseEvent dbEvent) throws MetaException {
    Database db = dbEvent.getDatabase();
    NotificationEvent event = new NotificationEvent(0, now(),
            EventType.DROP_DATABASE.toString(),
            msgFactory.buildDropDatabaseMessage(db).toString());
    event.setDbName(db.getName());
    process(event, dbEvent);
}
Also used : Database(org.apache.hadoop.hive.metastore.api.Database) NotificationEvent(org.apache.hadoop.hive.metastore.api.NotificationEvent)
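
Both listener callbacks above follow the same shape: extract the Database from the event, build a NotificationEvent describing it, and hand both to process(). For illustration, a minimal custom listener can reuse that shape by extending MetaStoreEventListener. The sketch below is not part of DbNotificationListener; the class name LoggingDatabaseListener is hypothetical, it only logs instead of persisting events, and registering it through the hive.metastore.event.listeners property is an assumption based on standard metastore configuration.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.MetaStoreEventListener;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.events.CreateDatabaseEvent;
import org.apache.hadoop.hive.metastore.events.DropDatabaseEvent;

// Hypothetical listener; only the callback signatures are taken from the examples above.
public class LoggingDatabaseListener extends MetaStoreEventListener {

    public LoggingDatabaseListener(Configuration config) {
        super(config);
    }

    @Override
    public void onCreateDatabase(CreateDatabaseEvent dbEvent) throws MetaException {
        // Same first step as DbNotificationListener: pull the Database off the event.
        Database db = dbEvent.getDatabase();
        System.out.println("Database created: " + db.getName());
    }

    @Override
    public void onDropDatabase(DropDatabaseEvent dbEvent) throws MetaException {
        System.out.println("Database dropped: " + dbEvent.getDatabase().getName());
    }
}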

Example 53 with Database

Use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.

From the class TestAcidTableSerializer, method testSerializeDeserialize.

@Test
public void testSerializeDeserialize() throws Exception {
    Database database = StreamingTestUtils.databaseBuilder(new File("/tmp")).name("db_1").build();
    Table table = StreamingTestUtils.tableBuilder(database)
            .name("table_1")
            .addColumn("one", "string")
            .addColumn("two", "integer")
            .partitionKeys("partition")
            .addPartition("p1")
            .buckets(10)
            .build();
    AcidTable acidTable = new AcidTable("db_1", "table_1", true, TableType.SINK);
    acidTable.setTable(table);
    acidTable.setWriteId(42L);
    String encoded = AcidTableSerializer.encode(acidTable);
    System.out.println(encoded);
    AcidTable decoded = AcidTableSerializer.decode(encoded);
    assertThat(decoded.getDatabaseName(), is("db_1"));
    assertThat(decoded.getTableName(), is("table_1"));
    assertThat(decoded.createPartitions(), is(true));
    assertThat(decoded.getOutputFormatName(), is("org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat"));
    assertThat(decoded.getTotalBuckets(), is(10));
    assertThat(decoded.getQualifiedName(), is("DB_1.TABLE_1"));
    assertThat(decoded.getWriteId(), is(42L));
    assertThat(decoded.getTableType(), is(TableType.SINK));
    assertThat(decoded.getTable(), is(table));
}
Also used : Table(org.apache.hadoop.hive.metastore.api.Table) Database(org.apache.hadoop.hive.metastore.api.Database) File(java.io.File) Test(org.junit.Test)

Example 54 with Database

Use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.

From the class TestObjectStore, method testPartitionOps.

/**
 * Tests partition operations
 */
@Test
public void testPartitionOps() throws MetaException, InvalidObjectException, NoSuchObjectException, InvalidInputException {
    Database db1 = new Database(DB1, "description", "locationurl", null);
    objectStore.createDatabase(db1);
    StorageDescriptor sd = createFakeSd("location");
    HashMap<String, String> tableParams = new HashMap<>();
    tableParams.put("EXTERNAL", "false");
    FieldSchema partitionKey1 = new FieldSchema("Country", ColumnType.STRING_TYPE_NAME, "");
    FieldSchema partitionKey2 = new FieldSchema("State", ColumnType.STRING_TYPE_NAME, "");
    Table tbl1 = new Table(TABLE1, DB1, "owner", 1, 2, 3, sd,
            Arrays.asList(partitionKey1, partitionKey2), tableParams, null, null, "MANAGED_TABLE");
    objectStore.createTable(tbl1);
    HashMap<String, String> partitionParams = new HashMap<>();
    partitionParams.put("PARTITION_LEVEL_PRIVILEGE", "true");
    List<String> value1 = Arrays.asList("US", "CA");
    Partition part1 = new Partition(value1, DB1, TABLE1, 111, 111, sd, partitionParams);
    objectStore.addPartition(part1);
    List<String> value2 = Arrays.asList("US", "MA");
    Partition part2 = new Partition(value2, DB1, TABLE1, 222, 222, sd, partitionParams);
    objectStore.addPartition(part2);
    Deadline.startTimer("getPartition");
    List<Partition> partitions = objectStore.getPartitions(DB1, TABLE1, 10);
    Assert.assertEquals(2, partitions.size());
    Assert.assertEquals(111, partitions.get(0).getCreateTime());
    Assert.assertEquals(222, partitions.get(1).getCreateTime());
    int numPartitions = objectStore.getNumPartitionsByFilter(DB1, TABLE1, "");
    Assert.assertEquals(partitions.size(), numPartitions);
    numPartitions = objectStore.getNumPartitionsByFilter(DB1, TABLE1, "country = \"US\"");
    Assert.assertEquals(2, numPartitions);
    objectStore.dropPartition(DB1, TABLE1, value1);
    partitions = objectStore.getPartitions(DB1, TABLE1, 10);
    Assert.assertEquals(1, partitions.size());
    Assert.assertEquals(222, partitions.get(0).getCreateTime());
    objectStore.dropPartition(DB1, TABLE1, value2);
    objectStore.dropTable(DB1, TABLE1);
    objectStore.dropDatabase(DB1);
}
Also used : Partition(org.apache.hadoop.hive.metastore.api.Partition) Table(org.apache.hadoop.hive.metastore.api.Table) HashMap(java.util.HashMap) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) Database(org.apache.hadoop.hive.metastore.api.Database) StorageDescriptor(org.apache.hadoop.hive.metastore.api.StorageDescriptor) MetastoreUnitTest(org.apache.hadoop.hive.metastore.annotation.MetastoreUnitTest) Test(org.junit.Test)
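
The test above drives the metastore through ObjectStore directly, but the four-argument Database constructor it uses (name, description, location URI, parameters) is the same Thrift object that client code passes over the wire. A rough sketch of the database portion of that lifecycle through IMetaStoreClient instead of ObjectStore might look like the following; the connection setup, database name, and location are assumptions for illustration, not taken from the test.

import java.util.Collections;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.Database;

public class DatabaseLifecycleSketch {
    public static void main(String[] args) throws Exception {
        // Assumes a reachable metastore configured through HiveConf (e.g. hive.metastore.uris).
        HiveConf conf = new HiveConf();
        IMetaStoreClient client = new HiveMetaStoreClient(conf);
        try {
            // Same Thrift constructor as in the test: name, description, location URI, parameters.
            Database db = new Database("db_example", "example database",
                    "file:///tmp/db_example", Collections.<String, String>emptyMap());
            client.createDatabase(db);
            Database fetched = client.getDatabase("db_example");
            System.out.println(fetched.getLocationUri());
            client.dropDatabase("db_example");
        } finally {
            client.close();
        }
    }
}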

Example 55 with Database

Use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.

From the class TestObjectStore, method testDirectSqlErrorMetrics.

@Test
public void testDirectSqlErrorMetrics() throws Exception {
    Configuration conf = MetastoreConf.newMetastoreConf();
    MetastoreConf.setBoolVar(conf, MetastoreConf.ConfVars.METRICS_ENABLED, true);
    Metrics.initialize(conf);
    MetastoreConf.setVar(conf, MetastoreConf.ConfVars.HIVE_CODAHALE_METRICS_REPORTER_CLASSES,
            "org.apache.hadoop.hive.common.metrics.metrics2.JsonFileMetricsReporter, "
                    + "org.apache.hadoop.hive.common.metrics.metrics2.JmxMetricsReporter");
    // re-run setUp so that we get an ObjectStore with the metrics initialized
    setUp();
    Counter directSqlErrors = Metrics.getRegistry().getCounters().get(MetricsConstants.DIRECTSQL_ERRORS);
    objectStore.new GetDbHelper("foo", true, true) {

        @Override
        protected Database getSqlResult(ObjectStore.GetHelper<Database> ctx) throws MetaException {
            return null;
        }

        @Override
        protected Database getJdoResult(ObjectStore.GetHelper<Database> ctx) throws MetaException, NoSuchObjectException {
            return null;
        }
    }.run(false);
    Assert.assertEquals(0, directSqlErrors.getCount());
    objectStore.new GetDbHelper("foo", true, true) {

        @Override
        protected Database getSqlResult(ObjectStore.GetHelper<Database> ctx) throws MetaException {
            throw new RuntimeException();
        }

        @Override
        protected Database getJdoResult(ObjectStore.GetHelper<Database> ctx) throws MetaException, NoSuchObjectException {
            return null;
        }
    }.run(false);
    Assert.assertEquals(1, directSqlErrors.getCount());
}
Also used : Counter(com.codahale.metrics.Counter) Configuration(org.apache.hadoop.conf.Configuration) Database(org.apache.hadoop.hive.metastore.api.Database) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) MetastoreUnitTest(org.apache.hadoop.hive.metastore.annotation.MetastoreUnitTest) Test(org.junit.Test)

Aggregations

Database (org.apache.hadoop.hive.metastore.api.Database) 236
Test (org.junit.Test) 107
Table (org.apache.hadoop.hive.metastore.api.Table) 70
ArrayList (java.util.ArrayList) 51
MetastoreCheckinTest (org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest) 39
MetaException (org.apache.hadoop.hive.metastore.api.MetaException) 39
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema) 37
NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException) 36
Partition (org.apache.hadoop.hive.metastore.api.Partition) 35
Path (org.apache.hadoop.fs.Path) 34
IOException (java.io.IOException) 29
HashMap (java.util.HashMap) 27
DatabaseBuilder (org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder) 26
StorageDescriptor (org.apache.hadoop.hive.metastore.api.StorageDescriptor) 24
InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException) 23
SerDeInfo (org.apache.hadoop.hive.metastore.api.SerDeInfo) 22
TableBuilder (org.apache.hadoop.hive.metastore.client.builder.TableBuilder) 22
TException (org.apache.thrift.TException) 21
InvalidObjectException (org.apache.hadoop.hive.metastore.api.InvalidObjectException) 20
FileSystem (org.apache.hadoop.fs.FileSystem) 17