Usage of org.apache.hadoop.hive.metastore.api.ISchema in project hive by apache.
From the class TestHiveMetaStoreSchemaMethods, method alterNonExistentSchema.
/**
 * Altering a schema that was never created must fail with {@link NoSuchObjectException}.
 * The test builds a valid replacement schema but targets a name that does not exist.
 */
@Test(expected = NoSuchObjectException.class)
public void alterNonExistentSchema() throws TException {
  String name = uniqueSchemaName();
  ISchemaBuilder builder = new ISchemaBuilder()
      .setSchemaType(SchemaType.HIVE)
      .setName(name)
      .setDescription("a new description");
  // No createISchema() beforehand — this alter call targets a missing schema.
  client.alterISchema(DEFAULT_DATABASE_NAME, name, builder.build());
}
Usage of org.apache.hadoop.hive.metastore.api.ISchema in project hive by apache.
From the class TestHiveMetaStoreSchemaMethods, method setVersionStateNoSuchVersion.
/**
 * Setting the state of a schema version that was never added must fail with
 * {@link NoSuchObjectException}, even though the enclosing schema exists.
 */
@Test(expected = NoSuchObjectException.class)
public void setVersionStateNoSuchVersion() throws TException {
  String name = uniqueSchemaName();
  // Create the schema itself, but deliberately add no versions to it.
  client.createISchema(
      new ISchemaBuilder().setSchemaType(SchemaType.AVRO).setName(name).build());
  // Version 1 does not exist, so this must throw.
  client.setSchemaVersionState(DEFAULT_DATABASE_NAME, name, 1, SchemaVersionState.INITIATED);
}
Usage of org.apache.hadoop.hive.metastore.api.ISchema in project hive by apache.
From the class TestHiveMetaStoreSchemaMethods, method schemaWithInvalidDatabase.
/**
 * Creating a schema in a database that does not exist must fail with
 * {@link NoSuchObjectException}.
 */
@Test(expected = NoSuchObjectException.class)
public void schemaWithInvalidDatabase() throws TException {
  ISchemaBuilder builder = new ISchemaBuilder()
      .setName("thisSchemaDoesntHaveADb")
      .setDbName("no.such.database")
      .setSchemaType(SchemaType.AVRO);
  client.createISchema(builder.build());
}
Usage of org.apache.hadoop.hive.metastore.api.ISchema in project hive by apache.
From the class TestHiveMetaStoreSchemaMethods, method addSchemaVersionOtherDb.
/**
 * End-to-end check of schema-version handling in a non-default database:
 * creates a database and a schema inside it, adds one fully-populated version
 * (columns, serde, state, description, fingerprint, etc.), reads it back and
 * verifies every field, confirms the READ_SCHEMA_VERSION pre-event fired,
 * then drops the version and verifies it is gone.
 */
@Test
public void addSchemaVersionOtherDb() throws TException {
// Use a dedicated database so this test does not touch DEFAULT_DATABASE_NAME.
String dbName = "other_db_for_schema_version";
Database db = new DatabaseBuilder().setName(dbName).build();
client.createDatabase(db);
String schemaName = uniqueSchemaName();
int version = 1;
ISchema schema = new ISchemaBuilder().setDbName(dbName).setSchemaType(SchemaType.AVRO).setName(schemaName).build();
client.createISchema(schema);
// Values for every optional SchemaVersion field, asserted back verbatim below.
String description = "very descriptive";
String schemaText = "this should look like json, but oh well";
String fingerprint = "this should be an md5 string";
String versionName = "why would I name a version?";
long creationTime = 10;
String serdeName = "serde_for_schema37";
String serializer = "org.apache.hadoop.hive.metastore.test.Serializer";
String deserializer = "org.apache.hadoop.hive.metastore.test.Deserializer";
String serdeDescription = "how do you describe a serde?";
SchemaVersion schemaVersion = new SchemaVersionBuilder().versionOf(schema).setVersion(version).addCol("a", ColumnType.INT_TYPE_NAME).addCol("b", ColumnType.FLOAT_TYPE_NAME).setCreatedAt(creationTime).setState(SchemaVersionState.INITIATED).setDescription(description).setSchemaText(schemaText).setFingerprint(fingerprint).setName(versionName).setSerdeName(serdeName).setSerdeSerializerClass(serializer).setSerdeDeserializerClass(deserializer).setSerdeDescription(serdeDescription).build();
client.addSchemaVersion(schemaVersion);
// Round-trip: fetch the version we just stored and verify every field survived.
schemaVersion = client.getSchemaVersion(dbName, schemaName, version);
Assert.assertNotNull(schemaVersion);
Assert.assertEquals(schemaName, schemaVersion.getSchema().getSchemaName());
Assert.assertEquals(dbName, schemaVersion.getSchema().getDbName());
Assert.assertEquals(version, schemaVersion.getVersion());
Assert.assertEquals(creationTime, schemaVersion.getCreatedAt());
Assert.assertEquals(SchemaVersionState.INITIATED, schemaVersion.getState());
Assert.assertEquals(description, schemaVersion.getDescription());
Assert.assertEquals(schemaText, schemaVersion.getSchemaText());
Assert.assertEquals(fingerprint, schemaVersion.getFingerprint());
Assert.assertEquals(versionName, schemaVersion.getName());
Assert.assertEquals(serdeName, schemaVersion.getSerDe().getName());
Assert.assertEquals(serializer, schemaVersion.getSerDe().getSerializerClass());
Assert.assertEquals(deserializer, schemaVersion.getSerDe().getDeserializerClass());
Assert.assertEquals(serdeDescription, schemaVersion.getSerDe().getDescription());
Assert.assertEquals(2, schemaVersion.getColsSize());
// Sort so the column order is deterministic before asserting by index.
List<FieldSchema> cols = schemaVersion.getCols();
Collections.sort(cols);
Assert.assertEquals("a", cols.get(0).getName());
Assert.assertEquals(ColumnType.INT_TYPE_NAME, cols.get(0).getType());
Assert.assertEquals("b", cols.get(1).getName());
Assert.assertEquals(ColumnType.FLOAT_TYPE_NAME, cols.get(1).getType());
// Exactly one read so far, so the pre-event counter must be 1.
Assert.assertEquals(1, (int) preEvents.get(PreEventContext.PreEventType.READ_SCHEMA_VERSION));
client.dropSchemaVersion(dbName, schemaName, version);
// After the drop, fetching the same version must throw.
try {
client.getSchemaVersion(dbName, schemaName, version);
Assert.fail();
} catch (NoSuchObjectException e) {
// all good
}
}
Usage of org.apache.hadoop.hive.metastore.api.ISchema in project hive by apache.
From the class TestHiveMetaStoreSchemaMethods, method iSchema.
/**
 * Full lifecycle test for ISchema CRUD: create a schema with every optional
 * field set, read it back and verify, alter every mutable field, read and
 * verify again, then drop it and confirm a subsequent get throws.
 * Also asserts that each operation fired exactly one pre-event, listener
 * event, and transactional listener event (READ_ISCHEMA fires twice since
 * the schema is fetched twice).
 */
@Test
public void iSchema() throws TException {
String schemaName = uniqueSchemaName();
String schemaGroup = "group1";
String description = "This is a description";
// Create with all optional fields populated so the read-back checks are meaningful.
ISchema schema = new ISchemaBuilder().setSchemaType(SchemaType.AVRO).setName(schemaName).setCompatibility(SchemaCompatibility.FORWARD).setValidationLevel(SchemaValidation.LATEST).setCanEvolve(false).setSchemaGroup(schemaGroup).setDescription(description).build();
client.createISchema(schema);
// One create => each event channel should have fired exactly once.
Assert.assertEquals(1, (int) preEvents.get(PreEventContext.PreEventType.CREATE_ISCHEMA));
Assert.assertEquals(1, (int) events.get(EventMessage.EventType.CREATE_ISCHEMA));
Assert.assertEquals(1, (int) transactionalEvents.get(EventMessage.EventType.CREATE_ISCHEMA));
// First read: verify everything persisted as created.
schema = client.getISchema(DEFAULT_DATABASE_NAME, schemaName);
Assert.assertEquals(1, (int) preEvents.get(PreEventContext.PreEventType.READ_ISCHEMA));
Assert.assertEquals(SchemaType.AVRO, schema.getSchemaType());
Assert.assertEquals(schemaName, schema.getName());
Assert.assertEquals(SchemaCompatibility.FORWARD, schema.getCompatibility());
Assert.assertEquals(SchemaValidation.LATEST, schema.getValidationLevel());
Assert.assertFalse(schema.isCanEvolve());
Assert.assertEquals(schemaGroup, schema.getSchemaGroup());
Assert.assertEquals(description, schema.getDescription());
// Mutate every alterable field to a new value before the alter call.
schemaGroup = "new group";
description = "new description";
schema.setCompatibility(SchemaCompatibility.BOTH);
schema.setValidationLevel(SchemaValidation.ALL);
schema.setCanEvolve(true);
schema.setSchemaGroup(schemaGroup);
schema.setDescription(description);
client.alterISchema(DEFAULT_DATABASE_NAME, schemaName, schema);
Assert.assertEquals(1, (int) preEvents.get(PreEventContext.PreEventType.ALTER_ISCHEMA));
Assert.assertEquals(1, (int) events.get(EventMessage.EventType.ALTER_ISCHEMA));
Assert.assertEquals(1, (int) transactionalEvents.get(EventMessage.EventType.ALTER_ISCHEMA));
// Second read: the READ_ISCHEMA pre-event count advances to 2.
schema = client.getISchema(DEFAULT_DATABASE_NAME, schemaName);
Assert.assertEquals(2, (int) preEvents.get(PreEventContext.PreEventType.READ_ISCHEMA));
Assert.assertEquals(SchemaType.AVRO, schema.getSchemaType());
Assert.assertEquals(schemaName, schema.getName());
Assert.assertEquals(SchemaCompatibility.BOTH, schema.getCompatibility());
Assert.assertEquals(SchemaValidation.ALL, schema.getValidationLevel());
Assert.assertTrue(schema.isCanEvolve());
Assert.assertEquals(schemaGroup, schema.getSchemaGroup());
Assert.assertEquals(description, schema.getDescription());
client.dropISchema(DEFAULT_DATABASE_NAME, schemaName);
Assert.assertEquals(1, (int) preEvents.get(PreEventContext.PreEventType.DROP_ISCHEMA));
Assert.assertEquals(1, (int) events.get(EventMessage.EventType.DROP_ISCHEMA));
Assert.assertEquals(1, (int) transactionalEvents.get(EventMessage.EventType.DROP_ISCHEMA));
// After the drop, the schema must no longer be fetchable.
try {
client.getISchema(DEFAULT_DATABASE_NAME, schemaName);
Assert.fail();
} catch (NoSuchObjectException e) {
// all good
}
}
Aggregations