
Example 11 with Catalog

Use of org.apache.hadoop.hive.metastore.api.Catalog in project hive by apache, from the class TestHiveMetaStoreSchemaMethods, method addSchemaVersionOtherDb.

@Test
public void addSchemaVersionOtherDb() throws TException {
    String catName = "other_cat_for_schema_version";
    Catalog cat = new CatalogBuilder()
            .setName(catName)
            .setLocation(MetaStoreTestUtils.getTestWarehouseDir(catName))
            .build();
    client.createCatalog(cat);
    String dbName = "other_db_for_schema_version";
    Database db = new DatabaseBuilder().setName(dbName).setCatalogName(catName).create(client, conf);
    String schemaName = uniqueSchemaName();
    int version = 1;
    ISchema schema = new ISchemaBuilder().inDb(db).setSchemaType(SchemaType.AVRO).setName(schemaName).build();
    client.createISchema(schema);
    String description = "very descriptive";
    String schemaText = "this should look like json, but oh well";
    String fingerprint = "this should be an md5 string";
    String versionName = "why would I name a version?";
    long creationTime = 10;
    String serdeName = "serde_for_schema37";
    String serializer = "org.apache.hadoop.hive.metastore.test.Serializer";
    String deserializer = "org.apache.hadoop.hive.metastore.test.Deserializer";
    String serdeDescription = "how do you describe a serde?";
    SchemaVersion schemaVersion = new SchemaVersionBuilder()
            .versionOf(schema)
            .setVersion(version)
            .addCol("a", ColumnType.INT_TYPE_NAME)
            .addCol("b", ColumnType.FLOAT_TYPE_NAME)
            .setCreatedAt(creationTime)
            .setState(SchemaVersionState.INITIATED)
            .setDescription(description)
            .setSchemaText(schemaText)
            .setFingerprint(fingerprint)
            .setName(versionName)
            .setSerdeName(serdeName)
            .setSerdeSerializerClass(serializer)
            .setSerdeDeserializerClass(deserializer)
            .setSerdeDescription(serdeDescription)
            .build();
    client.addSchemaVersion(schemaVersion);
    schemaVersion = client.getSchemaVersion(catName, dbName, schemaName, version);
    Assert.assertNotNull(schemaVersion);
    Assert.assertEquals(schemaName, schemaVersion.getSchema().getSchemaName());
    Assert.assertEquals(dbName, schemaVersion.getSchema().getDbName());
    Assert.assertEquals(catName, schemaVersion.getSchema().getCatName());
    Assert.assertEquals(version, schemaVersion.getVersion());
    Assert.assertEquals(creationTime, schemaVersion.getCreatedAt());
    Assert.assertEquals(SchemaVersionState.INITIATED, schemaVersion.getState());
    Assert.assertEquals(description, schemaVersion.getDescription());
    Assert.assertEquals(schemaText, schemaVersion.getSchemaText());
    Assert.assertEquals(fingerprint, schemaVersion.getFingerprint());
    Assert.assertEquals(versionName, schemaVersion.getName());
    Assert.assertEquals(serdeName, schemaVersion.getSerDe().getName());
    Assert.assertEquals(serializer, schemaVersion.getSerDe().getSerializerClass());
    Assert.assertEquals(deserializer, schemaVersion.getSerDe().getDeserializerClass());
    Assert.assertEquals(serdeDescription, schemaVersion.getSerDe().getDescription());
    Assert.assertEquals(2, schemaVersion.getColsSize());
    List<FieldSchema> cols = schemaVersion.getCols();
    Collections.sort(cols);
    Assert.assertEquals("a", cols.get(0).getName());
    Assert.assertEquals(ColumnType.INT_TYPE_NAME, cols.get(0).getType());
    Assert.assertEquals("b", cols.get(1).getName());
    Assert.assertEquals(ColumnType.FLOAT_TYPE_NAME, cols.get(1).getType());
    Assert.assertEquals(1, (int) preEvents.get(PreEventContext.PreEventType.READ_SCHEMA_VERSION));
    client.dropSchemaVersion(catName, dbName, schemaName, version);
    try {
        client.getSchemaVersion(catName, dbName, schemaName, version);
        Assert.fail();
    } catch (NoSuchObjectException e) {
        // all good
    }
}
Also used: SchemaVersion (org.apache.hadoop.hive.metastore.api.SchemaVersion), ISchema (org.apache.hadoop.hive.metastore.api.ISchema), FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema), Catalog (org.apache.hadoop.hive.metastore.api.Catalog), DatabaseBuilder (org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder), ISchemaBuilder (org.apache.hadoop.hive.metastore.client.builder.ISchemaBuilder), CatalogBuilder (org.apache.hadoop.hive.metastore.client.builder.CatalogBuilder), Database (org.apache.hadoop.hive.metastore.api.Database), NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException), SchemaVersionBuilder (org.apache.hadoop.hive.metastore.client.builder.SchemaVersionBuilder), Test (org.junit.Test), MetastoreCheckinTest (org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest)

Example 12 with Catalog

Use of org.apache.hadoop.hive.metastore.api.Catalog in project hive by apache, from the class TestHiveMetaStoreSchemaMethods, method iSchemaOtherDatabase.

@Test
public void iSchemaOtherDatabase() throws TException {
    String catName = "other_cat";
    Catalog cat = new CatalogBuilder()
            .setName(catName)
            .setLocation(MetaStoreTestUtils.getTestWarehouseDir(catName))
            .build();
    client.createCatalog(cat);
    String dbName = "other_db";
    Database db = new DatabaseBuilder().setName(dbName).setCatalogName(catName).create(client, conf);
    String schemaName = uniqueSchemaName();
    String schemaGroup = "group1";
    String description = "This is a description";
    ISchema schema = new ISchemaBuilder()
            .setSchemaType(SchemaType.AVRO)
            .setName(schemaName)
            .inDb(db)
            .setCompatibility(SchemaCompatibility.FORWARD)
            .setValidationLevel(SchemaValidation.LATEST)
            .setCanEvolve(false)
            .setSchemaGroup(schemaGroup)
            .setDescription(description)
            .build();
    client.createISchema(schema);
    schema = client.getISchema(catName, dbName, schemaName);
    Assert.assertEquals(SchemaType.AVRO, schema.getSchemaType());
    Assert.assertEquals(schemaName, schema.getName());
    Assert.assertEquals(catName, schema.getCatName());
    Assert.assertEquals(dbName, schema.getDbName());
    Assert.assertEquals(SchemaCompatibility.FORWARD, schema.getCompatibility());
    Assert.assertEquals(SchemaValidation.LATEST, schema.getValidationLevel());
    Assert.assertFalse(schema.isCanEvolve());
    Assert.assertEquals(schemaGroup, schema.getSchemaGroup());
    Assert.assertEquals(description, schema.getDescription());
    schemaGroup = "new group";
    description = "new description";
    schema.setCompatibility(SchemaCompatibility.BOTH);
    schema.setValidationLevel(SchemaValidation.ALL);
    schema.setCanEvolve(true);
    schema.setSchemaGroup(schemaGroup);
    schema.setDescription(description);
    client.alterISchema(catName, dbName, schemaName, schema);
    schema = client.getISchema(catName, dbName, schemaName);
    Assert.assertEquals(SchemaType.AVRO, schema.getSchemaType());
    Assert.assertEquals(schemaName, schema.getName());
    Assert.assertEquals(catName, schema.getCatName());
    Assert.assertEquals(dbName, schema.getDbName());
    Assert.assertEquals(SchemaCompatibility.BOTH, schema.getCompatibility());
    Assert.assertEquals(SchemaValidation.ALL, schema.getValidationLevel());
    Assert.assertTrue(schema.isCanEvolve());
    Assert.assertEquals(schemaGroup, schema.getSchemaGroup());
    Assert.assertEquals(description, schema.getDescription());
    client.dropISchema(catName, dbName, schemaName);
    try {
        client.getISchema(catName, dbName, schemaName);
        Assert.fail();
    } catch (NoSuchObjectException e) {
        // all good
    }
}
Also used: DatabaseBuilder (org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder), ISchemaBuilder (org.apache.hadoop.hive.metastore.client.builder.ISchemaBuilder), CatalogBuilder (org.apache.hadoop.hive.metastore.client.builder.CatalogBuilder), ISchema (org.apache.hadoop.hive.metastore.api.ISchema), Database (org.apache.hadoop.hive.metastore.api.Database), NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException), Catalog (org.apache.hadoop.hive.metastore.api.Catalog), Test (org.junit.Test), MetastoreCheckinTest (org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest)

Example 13 with Catalog

Use of org.apache.hadoop.hive.metastore.api.Catalog in project hive by apache, from the class TestCatalogNonDefaultSvr, method getClient.

@Override
protected IMetaStoreClient getClient() throws Exception {
    // Separate client to create the catalog
    catalogCapableClient = new HiveMetaStoreClient(conf);
    catLocation = MetaStoreTestUtils.getTestWarehouseDir(catName);
    Catalog cat = new CatalogBuilder().setName(catName).setLocation(catLocation).build();
    catalogCapableClient.createCatalog(cat);
    catalogCapableClient.close();
    MetastoreConf.setVar(conf, MetastoreConf.ConfVars.CATALOG_DEFAULT, catName);
    return new HiveMetaStoreClientPreCatalog(conf);
}
Also used: CatalogBuilder (org.apache.hadoop.hive.metastore.client.builder.CatalogBuilder), Catalog (org.apache.hadoop.hive.metastore.api.Catalog)
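
A hedged follow-up sketch (not part of the original test): after createCatalog, the catalog can be read back to confirm it exists. This assumes the getCatalog(String) and getCatalogs() calls of the Hive 3.x IMetaStoreClient catalog API and reuses the catName and catLocation fields from the example above.

// Sketch only: getCatalog()/getCatalogs() are assumed to be available on IMetaStoreClient
// (Hive 3.x catalog API); the verification client is illustrative.
IMetaStoreClient verifyClient = new HiveMetaStoreClient(conf);
try {
    Catalog fetched = verifyClient.getCatalog(catName);
    Assert.assertEquals(catName, fetched.getName());
    // The server may normalize the location URI, so only check that it is set.
    Assert.assertNotNull(fetched.getLocationUri());
    // The new catalog should now appear in the catalog listing.
    Assert.assertTrue(verifyClient.getCatalogs().contains(catName));
} finally {
    verifyClient.close();
}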

Example 14 with Catalog

Use of org.apache.hadoop.hive.metastore.api.Catalog in project hive by apache, from the class TestCatalogNonDefaultClient, method getClient.

@Override
protected IMetaStoreClient getClient() throws Exception {
    Configuration svrConf = new Configuration(conf);
    int port = MetaStoreTestUtils.startMetaStoreWithRetry(HadoopThriftAuthBridge.getBridge(), svrConf, false, false, false, false, false);
    // Only set the default catalog on the client.
    MetastoreConf.setVar(conf, MetastoreConf.ConfVars.THRIFT_URIS, "thrift://localhost:" + port);
    MetastoreConf.setVar(conf, MetastoreConf.ConfVars.CATALOG_DEFAULT, catName);
    IMetaStoreClient client = new HiveMetaStoreClient(conf);
    assert !client.isLocalMetaStore();
    // Don't make any calls but catalog calls until the catalog has been created, as we just told
    // the client to direct all calls to a catalog that does not yet exist.
    catLocation = MetaStoreTestUtils.getTestWarehouseDir(catName);
    Catalog cat = new CatalogBuilder().setName(catName).setLocation(catLocation).build();
    client.createCatalog(cat);
    return client;
}
Also used: Configuration (org.apache.hadoop.conf.Configuration), CatalogBuilder (org.apache.hadoop.hive.metastore.client.builder.CatalogBuilder), Catalog (org.apache.hadoop.hive.metastore.api.Catalog)
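
As a hedged illustration (not in the original test), once CATALOG_DEFAULT points at the new catalog on the client, a database created without an explicit catalog name should be routed to that catalog. This assumes DatabaseBuilder falls back to the configured default catalog when setCatalogName is not called, and that the catalog-scoped getDatabase and dropDatabase overloads of the Hive 3.x client API are available; the database name is made up for the sketch.

// Hypothetical check that client-side default-catalog routing took effect.
String dbName = "db_in_client_default_catalog";
Database db = new DatabaseBuilder()
        .setName(dbName)
        .create(client, conf);
// Fetch through the catalog-scoped API to confirm the database landed in catName.
Database fetched = client.getDatabase(catName, dbName);
Assert.assertEquals(catName, fetched.getCatalogName());
// Clean up with the catalog-aware overload (deleteData, ignoreUnknownDb, cascade).
client.dropDatabase(catName, dbName, true, true, false);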

Example 15 with Catalog

Use of org.apache.hadoop.hive.metastore.api.Catalog in project hive by apache, from the class TestDropPartitions, method otherCatalog.

@Test
@ConditionalIgnoreOnSessionHiveMetastoreClient
public void otherCatalog() throws TException {
    String catName = "drop_partition_catalog";
    Catalog cat = new CatalogBuilder()
            .setName(catName)
            .setLocation(MetaStoreTestUtils.getTestWarehouseDir(catName))
            .build();
    client.createCatalog(cat);
    String dbName = "drop_partition_database_in_other_catalog";
    Database db = new DatabaseBuilder().setName(dbName).setCatalogName(catName).create(client, metaStore.getConf());
    String tableName = "table_in_other_catalog";
    Table table = new TableBuilder()
            .inDb(db)
            .setTableName(tableName)
            .addCol("id", "int")
            .addCol("name", "string")
            .addPartCol("partcol", "string")
            .create(client, metaStore.getConf());
    Partition[] parts = new Partition[2];
    for (int i = 0; i < parts.length; i++) {
        parts[i] = new PartitionBuilder().inTable(table).addValue("a" + i).build(metaStore.getConf());
    }
    client.add_partitions(Arrays.asList(parts));
    List<Partition> fetched = client.listPartitions(catName, dbName, tableName, (short) -1);
    Assert.assertEquals(parts.length, fetched.size());
    Assert.assertTrue(client.dropPartition(catName, dbName, tableName, Collections.singletonList("a0"), PartitionDropOptions.instance().ifExists(false)));
    try {
        client.getPartition(catName, dbName, tableName, Collections.singletonList("a0"));
        Assert.fail();
    } catch (NoSuchObjectException e) {
        // NOP
    }
    Assert.assertTrue(client.dropPartition(catName, dbName, tableName, "partcol=a1", true));
    try {
        client.getPartition(catName, dbName, tableName, Collections.singletonList("a1"));
        Assert.fail();
    } catch (NoSuchObjectException e) {
        // NOP
    }
}
Also used: Partition (org.apache.hadoop.hive.metastore.api.Partition), Table (org.apache.hadoop.hive.metastore.api.Table), TableBuilder (org.apache.hadoop.hive.metastore.client.builder.TableBuilder), Catalog (org.apache.hadoop.hive.metastore.api.Catalog), DatabaseBuilder (org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder), PartitionBuilder (org.apache.hadoop.hive.metastore.client.builder.PartitionBuilder), CatalogBuilder (org.apache.hadoop.hive.metastore.client.builder.CatalogBuilder), Database (org.apache.hadoop.hive.metastore.api.Database), NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException), Test (org.junit.Test), MetastoreCheckinTest (org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest)

Aggregations

Catalog (org.apache.hadoop.hive.metastore.api.Catalog): 43 usages
CatalogBuilder (org.apache.hadoop.hive.metastore.client.builder.CatalogBuilder): 34 usages
DatabaseBuilder (org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder): 26 usages
Test (org.junit.Test): 23 usages
MetastoreCheckinTest (org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest): 19 usages
TableBuilder (org.apache.hadoop.hive.metastore.client.builder.TableBuilder): 19 usages
Database (org.apache.hadoop.hive.metastore.api.Database): 18 usages
NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException): 15 usages
Table (org.apache.hadoop.hive.metastore.api.Table): 11 usages
ArrayList (java.util.ArrayList): 8 usages
Before (org.junit.Before): 8 usages
Partition (org.apache.hadoop.hive.metastore.api.Partition): 6 usages
PartitionBuilder (org.apache.hadoop.hive.metastore.client.builder.PartitionBuilder): 6 usages
File (java.io.File): 4 usages
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 4 usages
ISchema (org.apache.hadoop.hive.metastore.api.ISchema): 4 usages
ISchemaBuilder (org.apache.hadoop.hive.metastore.client.builder.ISchemaBuilder): 4 usages
HashMap (java.util.HashMap): 3 usages
HashSet (java.util.HashSet): 3 usages
InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException): 3 usages
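
Taken together, the aggregation counts reflect the recurring setup pattern in the examples above: build a Catalog with CatalogBuilder, create it through the client, hang a Database off it, and clean up afterwards. The following is a condensed, hedged sketch of that pattern, not a verbatim test: it assumes a connected IMetaStoreClient named client, a Configuration named conf, made-up catalog and database names, and the catalog-aware dropDatabase/dropCatalog calls from the Hive 3.x client API.

// Condensed setup/teardown pattern distilled from the examples above (illustrative names).
String catName = "example_catalog";
Catalog cat = new CatalogBuilder()
        .setName(catName)
        .setLocation(MetaStoreTestUtils.getTestWarehouseDir(catName))
        .build();
client.createCatalog(cat);

String dbName = "example_db";
Database db = new DatabaseBuilder()
        .setName(dbName)
        .setCatalogName(catName)
        .create(client, conf);

// ... exercise catalog-scoped metastore calls against catName/dbName here ...

// Teardown: drop contents before the catalog itself; dropCatalog is assumed to require an
// empty catalog, and this catalog-aware dropDatabase overload is assumed to be available.
client.dropDatabase(catName, dbName, true, true, true);
client.dropCatalog(catName);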