Use of org.apache.hive.hcatalog.data.schema.HCatFieldSchema in project hive by apache.
From the class TestHCatClient, method testBasicDDLCommands:
@Test
public void testBasicDDLCommands() throws Exception {
  String db = "testdb";
  String tableOne = "testTable1";
  String tableTwo = "testTable2";
  String tableThree = "testTable3";
  HCatClient client = HCatClient.create(new Configuration(hcatConf));
  client.dropDatabase(db, true, HCatClient.DropDBMode.CASCADE);
  HCatCreateDBDesc dbDesc = HCatCreateDBDesc.create(db).ifNotExists(false).build();
  client.createDatabase(dbDesc);
  List<String> dbNames = client.listDatabaseNamesByPattern("*");
  assertTrue(dbNames.contains("default"));
  assertTrue(dbNames.contains(db));
  HCatDatabase testDb = client.getDatabase(db);
  assertTrue(testDb.getComment() == null);
  assertTrue(testDb.getProperties().size() == 0);
  String warehouseDir = System.getProperty("test.warehouse.dir", "/user/hive/warehouse");
  if (useExternalMS) {
    assertTrue(testDb.getLocation().matches(".*" + "/" + db + ".db"));
  } else {
    String expectedDir = warehouseDir.replaceFirst("pfile:///", "pfile:/") + "/" + msPort;
    assertEquals(expectedDir + "/" + db + ".db", testDb.getLocation());
  }
  ArrayList<HCatFieldSchema> cols = new ArrayList<HCatFieldSchema>();
  cols.add(new HCatFieldSchema("id", Type.INT, "id comment"));
  cols.add(new HCatFieldSchema("value", Type.STRING, "value comment"));
  HCatCreateTableDesc tableDesc = HCatCreateTableDesc.create(db, tableOne, cols).fileFormat("rcfile").build();
  client.createTable(tableDesc);
  HCatTable table1 = client.getTable(db, tableOne);
  assertTrue(table1.getInputFileFormat().equalsIgnoreCase(RCFileInputFormat.class.getName()));
  assertTrue(table1.getOutputFileFormat().equalsIgnoreCase(RCFileOutputFormat.class.getName()));
  assertTrue(table1.getSerdeLib().equalsIgnoreCase(LazyBinaryColumnarSerDe.class.getName()));
  assertTrue(table1.getCols().equals(cols));
  // Attempting to create the same table again will result in an exception.
  try {
    client.createTable(tableDesc);
    fail("Expected exception");
  } catch (HCatException e) {
    assertTrue(e.getMessage().contains("AlreadyExistsException while creating table."));
  }
  client.dropTable(db, tableOne, true);
  HCatCreateTableDesc tableDesc2 = HCatCreateTableDesc.create(db, tableTwo, cols).fieldsTerminatedBy('\001').escapeChar('\002').linesTerminatedBy('\003').mapKeysTerminatedBy('\004').collectionItemsTerminatedBy('\005').nullDefinedAs('\006').build();
  client.createTable(tableDesc2);
  HCatTable table2 = client.getTable(db, tableTwo);
  assertTrue("Expected TextInputFormat, but got: " + table2.getInputFileFormat(), table2.getInputFileFormat().equalsIgnoreCase(TextInputFormat.class.getName()));
  assertTrue(table2.getOutputFileFormat().equalsIgnoreCase(HiveIgnoreKeyTextOutputFormat.class.getName()));
  assertTrue("SerdeParams not found", table2.getSerdeParams() != null);
  assertEquals("checking " + serdeConstants.FIELD_DELIM, Character.toString('\001'), table2.getSerdeParams().get(serdeConstants.FIELD_DELIM));
  assertEquals("checking " + serdeConstants.ESCAPE_CHAR, Character.toString('\002'), table2.getSerdeParams().get(serdeConstants.ESCAPE_CHAR));
  assertEquals("checking " + serdeConstants.LINE_DELIM, Character.toString('\003'), table2.getSerdeParams().get(serdeConstants.LINE_DELIM));
  assertEquals("checking " + serdeConstants.MAPKEY_DELIM, Character.toString('\004'), table2.getSerdeParams().get(serdeConstants.MAPKEY_DELIM));
  assertEquals("checking " + serdeConstants.COLLECTION_DELIM, Character.toString('\005'), table2.getSerdeParams().get(serdeConstants.COLLECTION_DELIM));
  assertEquals("checking " + serdeConstants.SERIALIZATION_NULL_FORMAT, Character.toString('\006'), table2.getSerdeParams().get(serdeConstants.SERIALIZATION_NULL_FORMAT));
  assertTrue(table2.getLocation().toLowerCase().matches(".*" + ("/" + db + ".db/" + tableTwo).toLowerCase()));
  HCatCreateTableDesc tableDesc3 = HCatCreateTableDesc.create(db, tableThree, cols).fileFormat("orcfile").build();
  client.createTable(tableDesc3);
  HCatTable table3 = client.getTable(db, tableThree);
  assertTrue(table3.getInputFileFormat().equalsIgnoreCase(OrcInputFormat.class.getName()));
  assertTrue(table3.getOutputFileFormat().equalsIgnoreCase(OrcOutputFormat.class.getName()));
  assertTrue(table3.getSerdeLib().equalsIgnoreCase(OrcSerde.class.getName()));
  assertTrue(table3.getCols().equals(cols));
  client.close();
}
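
As a standalone reference, the HCatFieldSchema construction pattern used above can be exercised without a metastore connection. A minimal sketch (the class name FieldSchemaSketch is ours for illustration; everything else is the HCatalog API as used in the test):

import java.util.ArrayList;
import java.util.List;
import org.apache.hive.hcatalog.common.HCatException;
import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
import org.apache.hive.hcatalog.data.schema.HCatFieldSchema.Type;

public class FieldSchemaSketch {
  public static void main(String[] args) throws HCatException {
    // Build the same two-column schema the tests use.
    List<HCatFieldSchema> cols = new ArrayList<HCatFieldSchema>();
    cols.add(new HCatFieldSchema("id", Type.INT, "id comment"));
    cols.add(new HCatFieldSchema("value", Type.STRING, "value comment"));
    // Each field exposes its name and its Hive type string.
    for (HCatFieldSchema col : cols) {
      System.out.println(col.getName() + " : " + col.getTypeString());
    }
  }
}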
Use of org.apache.hive.hcatalog.data.schema.HCatFieldSchema in project hive by apache.
From the class TestHCatClient, method testEmptyTableInstantiation:
/**
 * Tests that a plain table instantiation matches what Hive says an
 * empty table create should look like.
 * @throws Exception
 */
@Test
public void testEmptyTableInstantiation() throws Exception {
  HCatClient client = HCatClient.create(new Configuration(hcatConf));
  String dbName = "default";
  String tblName = "testEmptyCreate";
  ArrayList<HCatFieldSchema> cols = new ArrayList<HCatFieldSchema>();
  cols.add(new HCatFieldSchema("id", Type.INT, "id comment"));
  cols.add(new HCatFieldSchema("value", Type.STRING, "value comment"));
  client.dropTable(dbName, tblName, true);
  // Create a minimalistic table
  client.createTable(HCatCreateTableDesc.create(new HCatTable(dbName, tblName).cols(cols), false).build());
  HCatTable tCreated = client.getTable(dbName, tblName);
  org.apache.hadoop.hive.metastore.api.Table emptyTable = Table.getEmptyTable(dbName, tblName);
  Map<String, String> createdProps = tCreated.getTblProps();
  Map<String, String> emptyProps = emptyTable.getParameters();
  mapEqualsContainedIn(emptyProps, createdProps);
  // Test sd params - we check that all the parameters in an empty table
  // are retained as-is. We may add beyond it, but not change values for
  // any parameters that hive defines for an empty table.
  Map<String, String> createdSdParams = tCreated.getSerdeParams();
  Map<String, String> emptySdParams = emptyTable.getSd().getSerdeInfo().getParameters();
  mapEqualsContainedIn(emptySdParams, createdSdParams);
}
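
The helper mapEqualsContainedIn is defined elsewhere in TestHCatClient and is not part of this excerpt. Presumably it asserts that every entry of the first map appears, with the same value, in the second; a hypothetical sketch of that containment check:

// Hypothetical reconstruction; the actual helper in TestHCatClient may differ.
private static void mapEqualsContainedIn(Map<String, String> expected, Map<String, String> actual) {
  for (Map.Entry<String, String> entry : expected.entrySet()) {
    assertTrue("Missing key: " + entry.getKey(), actual.containsKey(entry.getKey()));
    assertEquals("Value mismatch for key: " + entry.getKey(),
        entry.getValue(), actual.get(entry.getKey()));
  }
}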
Use of org.apache.hive.hcatalog.data.schema.HCatFieldSchema in project hive by apache.
From the class TestCommands, method testDropTableCommand2:
@Test
public void testDropTableCommand2() throws HCatException, MetaException {
  // Secondary DropTableCommand test for testing repl-drop-tables' effect on partitions inside a partitioned table
  // when there exist partitions inside the table which are older than the drop event.
  // Our goal is this : Create a table t, with repl.last.id=157, say.
  // Create 2 partitions inside it, with repl.last.id=150 and 160, say.
  // Now, process a drop table command with eventid=155.
  // It should result in the table and the partition with repl.last.id=160 continuing to exist,
  // but dropping the partition with repl.last.id=150.
  String dbName = "cmd_testdb";
  String tableName = "cmd_testtable";
  int evid = 157;
  List<HCatFieldSchema> pcols = HCatSchemaUtils.getHCatSchema("b:string").getFields();
  List<HCatFieldSchema> cols = HCatSchemaUtils.getHCatSchema("a:int").getFields();
  Command testReplicatedDropCmd = new DropTableCommand(dbName, tableName, true, evid);
  client.dropDatabase(dbName, true, HCatClient.DropDBMode.CASCADE);
  client.createDatabase(HCatCreateDBDesc.create(dbName).ifNotExists(false).build());
  Map<String, String> tprops = new HashMap<String, String>();
  tprops.put(ReplicationUtils.REPL_STATE_ID, String.valueOf(evid + 2));
  HCatTable table = (new HCatTable(dbName, tableName)).tblProps(tprops).cols(cols).partCols(pcols);
  client.createTable(HCatCreateTableDesc.create(table).build());
  HCatTable tableCreated = client.getTable(dbName, tableName);
  assertNotNull(tableCreated);
  Map<String, String> ptnDesc1 = new HashMap<String, String>();
  ptnDesc1.put("b", "test-older");
  Map<String, String> props1 = new HashMap<String, String>();
  props1.put(ReplicationUtils.REPL_STATE_ID, String.valueOf(evid - 5));
  HCatPartition ptnToAdd1 = (new HCatPartition(tableCreated, ptnDesc1, TestHCatClient.makePartLocation(tableCreated, ptnDesc1))).parameters(props1);
  client.addPartition(HCatAddPartitionDesc.create(ptnToAdd1).build());
  Map<String, String> ptnDesc2 = new HashMap<String, String>();
  ptnDesc2.put("b", "test-newer");
  Map<String, String> props2 = new HashMap<String, String>();
  props2.put(ReplicationUtils.REPL_STATE_ID, String.valueOf(evid + 5));
  HCatPartition ptnToAdd2 = (new HCatPartition(tableCreated, ptnDesc2, TestHCatClient.makePartLocation(tableCreated, ptnDesc2))).parameters(props2);
  client.addPartition(HCatAddPartitionDesc.create(ptnToAdd2).build());
  HCatPartition p1 = client.getPartition(dbName, tableName, ptnDesc1);
  assertNotNull(p1);
  HCatPartition p2 = client.getPartition(dbName, tableName, ptnDesc2);
  assertNotNull(p2);
  LOG.info("About to run :" + testReplicatedDropCmd.get().get(0));
  driver.run(testReplicatedDropCmd.get().get(0));
  HCatTable t_stillExists = client.getTable(dbName, tableName);
  assertNotNull(t_stillExists);
  HCatPartition p2_stillExists = client.getPartition(dbName, tableName, ptnDesc2);
  Exception onfe = null;
  try {
    HCatPartition p1_del = client.getPartition(dbName, tableName, ptnDesc1);
  } catch (Exception e) {
    onfe = e;
  }
  assertNotNull(onfe);
  assertTrue(onfe instanceof ObjectNotFoundException);
}
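
The behavior verified above rests on a simple rule: a replicated drop removes only objects whose repl.last.id predates the drop event's id (the test comments indicate ReplicationUtils.REPL_STATE_ID is the "repl.last.id" property). The sketch below illustrates that rule in isolation; it is our paraphrase, not the actual DropTableCommand or ReplicationUtils code:

// Illustrative only: mirrors the comparison this test exercises.
static boolean droppableByReplEvent(Map<String, String> objectParams, long dropEventId) {
  String replStateId = objectParams.get("repl.last.id");
  if (replStateId == null) {
    // No replication state recorded; treated here as droppable.
    return true;
  }
  // Drop only if the object's replication state predates the drop event.
  return Long.parseLong(replStateId) < dropEventId;
}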
Use of org.apache.hive.hcatalog.data.schema.HCatFieldSchema in project hive by apache.
From the class TestCommands, method testBasicReplEximCommands:
@Test
public void testBasicReplEximCommands() throws IOException {
  // repl export, has repl.last.id and repl.scope=all in it
  // import repl dump, table has repl.last.id on it (will likely be 0)
  int evid = 111;
  String exportLocation = TEST_PATH + File.separator + "testBasicReplExim";
  Path tempPath = new Path(TEST_PATH, "testBasicReplEximTmp");
  String tempLocation = tempPath.toUri().getPath();
  String dbName = "exim";
  String tableName = "basicSrc";
  String importedTableName = "basicDst";
  List<HCatFieldSchema> cols = HCatSchemaUtils.getHCatSchema("b:string").getFields();
  client.dropDatabase(dbName, true, HCatClient.DropDBMode.CASCADE);
  client.createDatabase(HCatCreateDBDesc.create(dbName).ifNotExists(false).build());
  HCatTable table = (new HCatTable(dbName, tableName)).cols(cols).fileFormat("textfile");
  client.createTable(HCatCreateTableDesc.create(table).build());
  HCatTable t = client.getTable(dbName, tableName);
  assertNotNull(t);
  String[] data = new String[] { "eleven", "twelve" };
  HcatTestUtils.createTestDataFile(tempLocation, data);
  CommandProcessorResponse ret = driver.run("LOAD DATA LOCAL INPATH '" + tempLocation + "' OVERWRITE INTO TABLE " + dbName + "." + tableName);
  assertEquals(ret.getResponseCode() + ":" + ret.getErrorMessage(), null, ret.getException());
  CommandProcessorResponse selectRet = driver.run("SELECT * from " + dbName + "." + tableName);
  assertEquals(selectRet.getResponseCode() + ":" + selectRet.getErrorMessage(), null, selectRet.getException());
  List<String> values = new ArrayList<String>();
  driver.getResults(values);
  assertEquals(2, values.size());
  assertEquals(data[0], values.get(0));
  assertEquals(data[1], values.get(1));
  ExportCommand exportCmd = new ExportCommand(dbName, tableName, null, exportLocation, false, evid);
  LOG.info("About to run :" + exportCmd.get().get(0));
  CommandProcessorResponse ret2 = driver.run(exportCmd.get().get(0));
  assertEquals(ret2.getResponseCode() + ":" + ret2.getErrorMessage(), null, ret2.getException());
  List<String> exportPaths = exportCmd.cleanupLocationsAfterEvent();
  assertEquals(1, exportPaths.size());
  String metadata = getMetadataContents(exportPaths.get(0));
  LOG.info("Export returned the following _metadata contents:");
  LOG.info(metadata);
assertTrue(metadata + "did not match \"repl.scope\"=\"all\"", metadata.matches(".*\"repl.scope\":\"all\".*"));
assertTrue(metadata + "has \"repl.last.id\"", metadata.matches(".*\"repl.last.id\":.*"));
  ImportCommand importCmd = new ImportCommand(dbName, importedTableName, null, exportLocation, false, evid);
  LOG.info("About to run :" + importCmd.get().get(0));
  CommandProcessorResponse ret3 = driver.run(importCmd.get().get(0));
  assertEquals(ret3.getResponseCode() + ":" + ret3.getErrorMessage(), null, ret3.getException());
  CommandProcessorResponse selectRet2 = driver.run("SELECT * from " + dbName + "." + importedTableName);
  assertEquals(selectRet2.getResponseCode() + ":" + selectRet2.getErrorMessage(), null, selectRet2.getException());
  List<String> values2 = new ArrayList<String>();
  driver.getResults(values2);
  assertEquals(2, values2.size());
  assertEquals(data[0], values2.get(0));
  assertEquals(data[1], values2.get(1));
  HCatTable importedTable = client.getTable(dbName, importedTableName);
  assertNotNull(importedTable);
  assertTrue(importedTable.getTblProps().containsKey("repl.last.id"));
}
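
getMetadataContents is another helper outside this excerpt; presumably it reads the _metadata file that the export writes under the returned export location. A minimal sketch under that assumption, using standard Hadoop FileSystem APIs (the usual java.io, java.nio.charset, and org.apache.hadoop.fs imports are assumed):

// Hypothetical reconstruction of the helper used above.
private static String getMetadataContents(String exportPath) throws IOException {
  Path metadataPath = new Path(exportPath, "_metadata");
  FileSystem fs = metadataPath.getFileSystem(new Configuration());
  StringBuilder contents = new StringBuilder();
  try (BufferedReader reader = new BufferedReader(
      new InputStreamReader(fs.open(metadataPath), StandardCharsets.UTF_8))) {
    String line;
    while ((line = reader.readLine()) != null) {
      contents.append(line);
    }
  }
  return contents.toString();
}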
Use of org.apache.hive.hcatalog.data.schema.HCatFieldSchema in project hive by apache.
From the class FosterStorageHandler, method configureInputJobProperties:
@Override
public void configureInputJobProperties(TableDesc tableDesc, Map<String, String> jobProperties) {
  try {
    Map<String, String> tableProperties = tableDesc.getJobProperties();
    String jobInfoProperty = tableProperties.get(HCatConstants.HCAT_KEY_JOB_INFO);
    if (jobInfoProperty != null) {
      InputJobInfo inputJobInfo = (InputJobInfo) HCatUtil.deserialize(jobInfoProperty);
      HCatTableInfo tableInfo = inputJobInfo.getTableInfo();
      HCatSchema dataColumns = tableInfo.getDataColumns();
      List<HCatFieldSchema> dataFields = dataColumns.getFields();
      StringBuilder columnNamesSb = new StringBuilder();
      StringBuilder typeNamesSb = new StringBuilder();
      for (HCatFieldSchema dataField : dataFields) {
        if (columnNamesSb.length() > 0) {
          columnNamesSb.append(",");
          typeNamesSb.append(":");
        }
        columnNamesSb.append(dataField.getName());
        typeNamesSb.append(dataField.getTypeString());
      }
      jobProperties.put(IOConstants.SCHEMA_EVOLUTION_COLUMNS, columnNamesSb.toString());
      jobProperties.put(IOConstants.SCHEMA_EVOLUTION_COLUMNS_TYPES, typeNamesSb.toString());
      boolean isTransactionalTable = AcidUtils.isTablePropertyTransactional(tableProperties);
      AcidUtils.AcidOperationalProperties acidOperationalProperties = AcidUtils.getAcidOperationalProperties(tableProperties);
      AcidUtils.setAcidOperationalProperties(jobProperties, isTransactionalTable, acidOperationalProperties);
    }
  } catch (IOException e) {
    throw new IllegalStateException("Failed to configure input job properties", e);
  }
}
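
To make the string-building concrete: the loop joins column names with commas and type names with colons before storing them as schema-evolution job properties. A small illustrative driver (not part of FosterStorageHandler; it assumes HCatSchemaUtils.getHCatSchema accepts a multi-column schema string just as it accepts the single-column strings in the tests above, and may throw HCatException):

// For fields id:int and value:string this prints:
//   columns = id,value
//   types   = int:string
HCatSchema schema = HCatSchemaUtils.getHCatSchema("id:int,value:string");
StringBuilder columnNamesSb = new StringBuilder();
StringBuilder typeNamesSb = new StringBuilder();
for (HCatFieldSchema dataField : schema.getFields()) {
  if (columnNamesSb.length() > 0) {
    columnNamesSb.append(",");
    typeNamesSb.append(":");
  }
  columnNamesSb.append(dataField.getName());
  typeNamesSb.append(dataField.getTypeString());
}
System.out.println("columns = " + columnNamesSb);
System.out.println("types   = " + typeNamesSb);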