Use of org.apache.hadoop.hive.metastore.api.Type in the Apache Hive project — class TestAcidTableSetup, method testTransactionalValidation:
/**
 * Verifies metastore-side validation of the 'transactional' TBLPROPERTIES key:
 * empty or bogus values are rejected, 'true' additionally requires an ACID
 * compliant storage format (ORC), 'transactional'='true' cannot later be unset,
 * and ALTER TABLE enforces the same format requirement as CREATE TABLE.
 */
@Test
public void testTransactionalValidation() throws Throwable {
    final String dbName = "acidDb";
    silentDropDatabase(dbName);
    Database database = new Database();
    database.setName(dbName);
    client.createDatabase(database);

    String tblName = "acidTable";
    Map<String, String> cols = new HashMap<>();
    cols.put("name", ColumnType.STRING_TYPE_NAME);
    cols.put("income", ColumnType.INT_TYPE_NAME);
    Type personType = createType("Person1", cols);

    Map<String, String> tblProps = new HashMap<>();
    tblProps.put("transactional", "");
    // Fail: an empty 'transactional' value is rejected outright.
    try {
        Table candidate = new TableBuilder()
            .setDbName(dbName)
            .setTableName(tblName)
            .setTableParams(tblProps)
            .setCols(personType.getFields())
            .build();
        client.createTable(candidate);
        fail("Expected exception");
    } catch (MetaException e) {
        assertEquals("'transactional' property of TBLPROPERTIES may only have value 'true': acidDb.acidTable", e.getMessage());
    }
    // Fail: any value other than 'true' is rejected.
    try {
        tblProps.clear();
        tblProps.put("transactional", "foobar");
        Table candidate = new TableBuilder()
            .setDbName(dbName)
            .setTableName(tblName)
            .setTableParams(tblProps)
            .setCols(personType.getFields())
            .build();
        client.createTable(candidate);
        fail("Expected exception");
    } catch (MetaException e) {
        assertEquals("'transactional' property of TBLPROPERTIES may only have value 'true': acidDb.acidTable", e.getMessage());
    }
    // Fail: 'transactional'='true' but the table is not bucketed.
    try {
        tblProps.clear();
        tblProps.put("transactional", "true");
        Table candidate = new TableBuilder()
            .setDbName(dbName)
            .setTableName(tblName)
            .setTableParams(tblProps)
            .setCols(personType.getFields())
            .build();
        client.createTable(candidate);
        fail("Expected exception");
    } catch (MetaException e) {
        assertEquals("The table must be stored using an ACID compliant format (such as ORC): acidDb.acidTable", e.getMessage());
    }
    List<String> bucketCols = new ArrayList<>();
    bucketCols.add("income");
    // Fail: bucketed, but not stored in an ACID compliant format (no ORC).
    try {
        tblProps.clear();
        tblProps.put("transactional", "true");
        Table candidate = new TableBuilder()
            .setDbName(dbName)
            .setTableName(tblName)
            .setTableParams(tblProps)
            .setCols(personType.getFields())
            .setBucketCols(bucketCols)
            .build();
        client.createTable(candidate);
        fail("Expected exception");
    } catch (MetaException e) {
        assertEquals("The table must be stored using an ACID compliant format (such as ORC): acidDb.acidTable", e.getMessage());
    }
    // Succeed: 'transactional'='true', bucketed, and stored as ORC.
    tblProps.clear();
    tblProps.put("transactional", "true");
    Table table = new TableBuilder()
        .setDbName(dbName)
        .setTableName(tblName)
        .setTableParams(tblProps)
        .setCols(personType.getFields())
        .setBucketCols(bucketCols)
        .setInputFormat("org.apache.hadoop.hive.ql.io.orc.OrcInputFormat")
        .setOutputFormat("org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat")
        .build();
    client.createTable(table);
    assertTrue("CREATE TABLE should succeed", "true".equals(table.getParameters().get(hive_metastoreConstants.TABLE_IS_TRANSACTIONAL)));
    // Fail: once transactional, the property cannot be flipped to 'false'.
    try {
        tblProps.clear();
        tblProps.put("transactional", "false");
        table = new Table();
        table.setParameters(tblProps);
        table.setDbName(dbName);
        table.setTableName(tblName);
        client.alter_table(dbName, tblName, table);
        fail("Expected exception");
    } catch (MetaException e) {
        assertEquals("TBLPROPERTIES with 'transactional'='true' cannot be unset: acidDb.acidTable", e.getMessage());
    }
    // Fail: ALTER TABLE to 'transactional'='true' on a table that does not
    // satisfy the bucketing / Input-OutputFormat requirements.
    try {
        tblName += "1";
        tblProps.clear();
        table = new TableBuilder()
            .setDbName(dbName)
            .setTableName(tblName)
            .setCols(personType.getFields())
            .setInputFormat("org.apache.hadoop.mapred.FileInputFormat")
            .build();
        client.createTable(table);
        tblProps.put("transactional", "true");
        table.setParameters(tblProps);
        client.alter_table(dbName, tblName, table);
        fail("Expected exception");
    } catch (MetaException e) {
        assertEquals("The table must be stored using an ACID compliant format (such as ORC): acidDb.acidTable1", e.getMessage());
    }
    // Succeed: ALTER TABLE to 'transactional'='true' on a bucketed ORC table.
    tblName += "2";
    tblProps.clear();
    table = new TableBuilder()
        .setDbName(dbName)
        .setTableName(tblName)
        .setCols(personType.getFields())
        .setNumBuckets(1)
        .setBucketCols(bucketCols)
        .setInputFormat("org.apache.hadoop.hive.ql.io.orc.OrcInputFormat")
        .setOutputFormat("org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat")
        .build();
    client.createTable(table);
    tblProps.put("transactional", "true");
    table.setParameters(tblProps);
    client.alter_table(dbName, tblName, table);
    assertTrue("ALTER TABLE should succeed", "true".equals(table.getParameters().get(hive_metastoreConstants.TABLE_IS_TRANSACTIONAL)));
}
Use of org.apache.hadoop.hive.metastore.api.Type in the Apache Hive project — class TestHiveMetaStore, method testComplexTable:
/**
 * Round-trips a partitioned, bucketed table through the metastore and checks
 * that every piece of metadata (columns, storage descriptor settings, bucket
 * columns, partition keys, and the field/schema listings) comes back intact.
 * Cleans up the table, type, and database in a finally block.
 */
@Test
public void testComplexTable() throws Exception {
    final String dbName = "compdb";
    final String tblName = "comptbl";
    final String typeName = "Person";
    try {
        // Start from a clean slate.
        client.dropTable(dbName, tblName);
        silentDropDatabase(dbName);
        Database database = new Database();
        database.setName(dbName);
        client.createDatabase(database);

        client.dropType(typeName);
        Type personType = new Type();
        personType.setName(typeName);
        personType.setFields(new ArrayList<>(2));
        personType.getFields().add(new FieldSchema("name", ColumnType.STRING_TYPE_NAME, ""));
        personType.getFields().add(new FieldSchema("income", ColumnType.INT_TYPE_NAME, ""));
        client.createType(personType);

        Table created = new TableBuilder()
            .setDbName(dbName)
            .setTableName(tblName)
            .setCols(personType.getFields())
            .addPartCol("ds", ColumnType.DATE_TYPE_NAME)
            .addPartCol("hr", ColumnType.INT_TYPE_NAME)
            .setNumBuckets(1)
            .addBucketCol("name")
            .addStorageDescriptorParam("test_param_1", "Use this for comments etc")
            .build();
        client.createTable(created);

        // Fetch the table back and verify every stored attribute.
        Table fetched = client.getTable(dbName, tblName);
        assertEquals(fetched.getDbName(), dbName);
        assertEquals(fetched.getTableName(), tblName);
        assertEquals(fetched.getSd().getCols().size(), personType.getFields().size());
        assertFalse(fetched.getSd().isCompressed());
        assertFalse(fetched.getSd().isStoredAsSubDirectories());
        assertEquals(fetched.getSd().getNumBuckets(), 1);
        assertEquals("Use this for comments etc", fetched.getSd().getParameters().get("test_param_1"));
        assertEquals("name", fetched.getSd().getBucketCols().get(0));
        assertNotNull(fetched.getPartitionKeys());
        assertEquals(2, fetched.getPartitionKeys().size());
        assertEquals(ColumnType.DATE_TYPE_NAME, fetched.getPartitionKeys().get(0).getType());
        assertEquals(ColumnType.INT_TYPE_NAME, fetched.getPartitionKeys().get(1).getType());
        assertEquals("ds", fetched.getPartitionKeys().get(0).getName());
        assertEquals("hr", fetched.getPartitionKeys().get(1).getName());

        // getFields returns only the data columns.
        List<FieldSchema> dataFields = client.getFields(dbName, tblName);
        assertNotNull(dataFields);
        assertEquals(dataFields.size(), created.getSd().getCols().size());
        for (FieldSchema fs : created.getSd().getCols()) {
            assertTrue(dataFields.contains(fs));
        }

        // getSchema returns data columns plus partition keys.
        List<FieldSchema> fullSchema = client.getSchema(dbName, tblName);
        assertNotNull(fullSchema);
        assertEquals(fullSchema.size(), created.getSd().getCols().size() + created.getPartitionKeys().size());
        for (FieldSchema fs : created.getSd().getCols()) {
            assertTrue(fullSchema.contains(fs));
        }
        for (FieldSchema fs : created.getPartitionKeys()) {
            assertTrue(fullSchema.contains(fs));
        }
    } catch (Exception e) {
        System.err.println(StringUtils.stringifyException(e));
        System.err.println("testComplexTable() failed.");
        throw e;
    } finally {
        client.dropTable(dbName, tblName);
        boolean ret = client.dropType(typeName);
        assertTrue("Unable to drop type " + typeName, ret);
        client.dropDatabase(dbName);
    }
}
Use of org.apache.hadoop.hive.metastore.api.Type in the Apache Hive project — class TestHiveMetaStore, method createType:
/**
 * Builds a metastore {@link Type} named {@code typeName} with one
 * {@link FieldSchema} per entry of {@code fields} (field name -> column type,
 * empty comment), registers it via {@code client.createType}, and returns it.
 *
 * @param typeName name for the new type
 * @param fields   map of field name to column-type name; iteration order of
 *                 the map determines the field order of the type
 * @return the registered {@link Type}
 * @throws Throwable if the metastore call fails
 */
private Type createType(String typeName, Map<String, String> fields) throws Throwable {
    Type typ1 = new Type();
    typ1.setName(typeName);
    List<FieldSchema> schemas = new ArrayList<>(fields.size());
    // Iterate entries directly instead of keySet() + get() per key: one map
    // lookup per field instead of two.
    for (Map.Entry<String, String> field : fields.entrySet()) {
        schemas.add(new FieldSchema(field.getKey(), field.getValue(), ""));
    }
    typ1.setFields(schemas);
    client.createType(typ1);
    return typ1;
}
Use of org.apache.hadoop.hive.metastore.api.Type in the Apache Hive project — class TestHiveMetaStore, method testSimpleTypeApi:
/**
 * Exercises the basic type API: create a type named after the int column
 * type, read it back, drop it, and confirm a subsequent lookup raises
 * {@link NoSuchObjectException}.
 */
@Test
public void testSimpleTypeApi() throws Exception {
    try {
        // Remove any leftover from a previous run.
        client.dropType(ColumnType.INT_TYPE_NAME);

        Type intType = new Type();
        intType.setName(ColumnType.INT_TYPE_NAME);
        assertTrue("Unable to create type", client.createType(intType));

        Type fetched = client.getType(ColumnType.INT_TYPE_NAME);
        assertNotNull(fetched);
        assertEquals(intType.getName(), fetched.getName());

        assertTrue("unable to drop type integer", client.dropType(ColumnType.INT_TYPE_NAME));

        // After the drop, lookups must fail with NoSuchObjectException.
        boolean exceptionThrown = false;
        try {
            client.getType(ColumnType.INT_TYPE_NAME);
        } catch (NoSuchObjectException e) {
            exceptionThrown = true;
        }
        assertTrue("Expected NoSuchObjectException", exceptionThrown);
    } catch (Exception e) {
        System.err.println(StringUtils.stringifyException(e));
        System.err.println("testSimpleTypeApi() failed.");
        throw e;
    }
}
Aggregations