Example 1 with HCatTable

Use of org.apache.hive.hcatalog.api.HCatTable in project hive by apache.

From class TestCommands, method testDropTableCommand:

@Test
public void testDropTableCommand() throws HCatException, CommandNeedRetryException {
    String dbName = "cmd_testdb";
    String tableName = "cmd_testtable";
    int evid = 789;
    List<HCatFieldSchema> cols = HCatSchemaUtils.getHCatSchema("a:int,b:string").getFields();
    Command testReplicatedDropCmd = new DropTableCommand(dbName, tableName, true, evid);
    assertEquals(evid, testReplicatedDropCmd.getEventId());
    assertEquals(1, testReplicatedDropCmd.get().size());
    assertTrue(testReplicatedDropCmd.isRetriable());
    assertFalse(testReplicatedDropCmd.isUndoable());
    CommandTestUtils.testCommandSerialization(testReplicatedDropCmd);
    Command testNormalDropCmd = new DropTableCommand(dbName, tableName, false, evid);
    assertEquals(evid, testNormalDropCmd.getEventId());
    assertEquals(1, testNormalDropCmd.get().size());
    assertTrue(testNormalDropCmd.isRetriable());
    assertFalse(testNormalDropCmd.isUndoable());
    CommandTestUtils.testCommandSerialization(testNormalDropCmd);
    client.dropDatabase(dbName, true, HCatClient.DropDBMode.CASCADE);
    client.createDatabase(HCatCreateDBDesc.create(dbName).ifNotExists(false).build());
    Map<String, String> tprops = new HashMap<String, String>();
    tprops.put(ReplicationUtils.REPL_STATE_ID, String.valueOf(evid + 5));
    HCatTable tableToCreate = (new HCatTable(dbName, tableName)).tblProps(tprops).cols(cols);
    client.createTable(HCatCreateTableDesc.create(tableToCreate).build());
    HCatTable t1 = client.getTable(dbName, tableName);
    assertNotNull(t1);
    // Test replicated drop, should not drop, because evid < repl.state.id
    LOG.info("About to run :" + testReplicatedDropCmd.get().get(0));
    driver.run(testReplicatedDropCmd.get().get(0));
    HCatTable t2 = client.getTable(dbName, tableName);
    assertNotNull(t2);
    // Test normal drop, should drop unconditionally.
    LOG.info("About to run :" + testNormalDropCmd.get().get(0));
    driver.run(testNormalDropCmd.get().get(0));
    Exception onfe = null;
    try {
        HCatTable t_del = client.getTable(dbName, tableName);
    } catch (Exception e) {
        onfe = e;
    }
    assertNotNull(onfe);
    assertTrue(onfe instanceof ObjectNotFoundException);
    Map<String, String> tprops2 = new HashMap<String, String>();
    tprops2.put(ReplicationUtils.REPL_STATE_ID, String.valueOf(evid - 5));
    HCatTable tableToCreate2 = (new HCatTable(dbName, tableName)).tblProps(tprops2).cols(cols);
    client.createTable(HCatCreateTableDesc.create(tableToCreate2).build());
    HCatTable t3 = client.getTable(dbName, tableName);
    assertNotNull(t3);
    // Test replicated drop, should drop this time, since repl.state.id < evid.
    LOG.info("About to run :" + testReplicatedDropCmd.get().get(0));
    driver.run(testReplicatedDropCmd.get().get(0));
    Exception onfe2 = null;
    try {
        HCatTable t_del = client.getTable(dbName, tableName);
    } catch (Exception e) {
        onfe2 = e;
    }
    assertNotNull(onfe2);
    assertTrue(onfe2 instanceof ObjectNotFoundException);
}
Also used:
Command (org.apache.hive.hcatalog.api.repl.Command)
HashMap (java.util.HashMap)
ObjectNotFoundException (org.apache.hive.hcatalog.api.ObjectNotFoundException)
HCatTable (org.apache.hive.hcatalog.api.HCatTable)
MetaException (org.apache.hadoop.hive.metastore.api.MetaException)
HCatException (org.apache.hive.hcatalog.common.HCatException)
CommandNeedRetryException (org.apache.hadoop.hive.ql.CommandNeedRetryException)
IOException (java.io.IOException)
HCatFieldSchema (org.apache.hive.hcatalog.data.schema.HCatFieldSchema)
Test (org.junit.Test)
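
The replicated variants above hinge on comparing the command's event id with the repl.last.id recorded on the object being dropped. The sketch below is a minimal illustration of that guard: ReplicationUtils.REPL_STATE_ID is the real property key exercised by the test, while the ReplDropGuard class and its shouldApply method are hypothetical, for exposition only.

import java.util.Map;

import org.apache.hive.hcatalog.api.repl.ReplicationUtils;

// Illustrative sketch only; not Hive's actual implementation.
class ReplDropGuard {
    // A replicated drop with a given event id applies only if the object
    // carries no repl.last.id at all, or its recorded state is strictly older.
    static boolean shouldApply(Map<String, String> props, long eventId) {
        String lastReplId = props.get(ReplicationUtils.REPL_STATE_ID);
        return lastReplId == null || Long.parseLong(lastReplId) < eventId;
    }
}

With evid=789 and repl.last.id=794 the guard rejects the first replicated drop (hence t2 still exists); once the table is recreated with repl.last.id=784, the same command goes through.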

Example 2 with HCatTable

Use of org.apache.hive.hcatalog.api.HCatTable in project hive by apache.

From class TestCommands, method testDropPartitionCommand:

@Test
public void testDropPartitionCommand() throws HCatException, CommandNeedRetryException, MetaException {
    String dbName = "cmd_testdb";
    String tableName = "cmd_testtable";
    int evid = 789;
    List<HCatFieldSchema> pcols = HCatSchemaUtils.getHCatSchema("b:string").getFields();
    List<HCatFieldSchema> cols = HCatSchemaUtils.getHCatSchema("a:int").getFields();
    Map<String, String> ptnDesc = new HashMap<String, String>();
    ptnDesc.put("b", "test");
    Command testReplicatedDropPtnCmd = new DropPartitionCommand(dbName, tableName, ptnDesc, true, evid);
    assertEquals(evid, testReplicatedDropPtnCmd.getEventId());
    assertEquals(1, testReplicatedDropPtnCmd.get().size());
    assertTrue(testReplicatedDropPtnCmd.isRetriable());
    assertFalse(testReplicatedDropPtnCmd.isUndoable());
    CommandTestUtils.testCommandSerialization(testReplicatedDropPtnCmd);
    Command testNormalDropPtnCmd = new DropPartitionCommand(dbName, tableName, ptnDesc, false, evid);
    assertEquals(evid, testNormalDropPtnCmd.getEventId());
    assertEquals(1, testNormalDropPtnCmd.get().size());
    assertTrue(testNormalDropPtnCmd.isRetriable());
    assertFalse(testNormalDropPtnCmd.isUndoable());
    CommandTestUtils.testCommandSerialization(testNormalDropPtnCmd);
    client.dropDatabase(dbName, true, HCatClient.DropDBMode.CASCADE);
    client.createDatabase(HCatCreateDBDesc.create(dbName).ifNotExists(false).build());
    Map<String, String> props = new HashMap<String, String>();
    props.put(ReplicationUtils.REPL_STATE_ID, String.valueOf(evid + 5));
    HCatTable table = (new HCatTable(dbName, tableName)).tblProps(props).cols(cols).partCols(pcols);
    client.createTable(HCatCreateTableDesc.create(table).build());
    HCatTable tableCreated = client.getTable(dbName, tableName);
    assertNotNull(tableCreated);
    HCatPartition ptnToAdd = (new HCatPartition(tableCreated, ptnDesc, TestHCatClient.makePartLocation(tableCreated, ptnDesc))).parameters(props);
    client.addPartition(HCatAddPartitionDesc.create(ptnToAdd).build());
    HCatPartition p1 = client.getPartition(dbName, tableName, ptnDesc);
    assertNotNull(p1);
    // Test replicated drop, should not drop, because evid < repl.state.id
    LOG.info("About to run :" + testReplicatedDropPtnCmd.get().get(0));
    driver.run(testReplicatedDropPtnCmd.get().get(0));
    HCatPartition p2 = client.getPartition(dbName, tableName, ptnDesc);
    assertNotNull(p2);
    // Test normal drop, should drop unconditionally.
    LOG.info("About to run :" + testNormalDropPtnCmd.get().get(0));
    driver.run(testNormalDropPtnCmd.get().get(0));
    Exception onfe = null;
    try {
        HCatPartition p_del = client.getPartition(dbName, tableName, ptnDesc);
    } catch (Exception e) {
        onfe = e;
    }
    assertNotNull(onfe);
    assertTrue(onfe instanceof ObjectNotFoundException);
    Map<String, String> props2 = new HashMap<String, String>();
    props2.put(ReplicationUtils.REPL_STATE_ID, String.valueOf(evid - 5));
    HCatPartition ptnToAdd2 = (new HCatPartition(tableCreated, ptnDesc, TestHCatClient.makePartLocation(tableCreated, ptnDesc))).parameters(props2);
    client.addPartition(HCatAddPartitionDesc.create(ptnToAdd2).build());
    HCatPartition p3 = client.getPartition(dbName, tableName, ptnDesc);
    assertNotNull(p3);
    // Test replicated drop, should drop this time, since repl.state.id < evid.
    LOG.info("About to run :" + testReplicatedDropPtnCmd.get().get(0));
    driver.run(testReplicatedDropPtnCmd.get().get(0));
    Exception onfe2 = null;
    try {
        HCatPartition p_del = client.getPartition(dbName, tableName, ptnDesc);
    } catch (Exception e) {
        onfe2 = e;
    }
    assertNotNull(onfe2);
    assertTrue(onfe2 instanceof ObjectNotFoundException);
}
Also used:
HashMap (java.util.HashMap)
Command (org.apache.hive.hcatalog.api.repl.Command)
ObjectNotFoundException (org.apache.hive.hcatalog.api.ObjectNotFoundException)
HCatTable (org.apache.hive.hcatalog.api.HCatTable)
HCatPartition (org.apache.hive.hcatalog.api.HCatPartition)
MetaException (org.apache.hadoop.hive.metastore.api.MetaException)
HCatException (org.apache.hive.hcatalog.common.HCatException)
CommandNeedRetryException (org.apache.hadoop.hive.ql.CommandNeedRetryException)
IOException (java.io.IOException)
HCatFieldSchema (org.apache.hive.hcatalog.data.schema.HCatFieldSchema)
Test (org.junit.Test)
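
CommandTestUtils.testCommandSerialization verifies that each command survives a serialization round-trip, which is how commands travel through a replication pipeline. A minimal sketch of that round-trip, assuming the ReplicationUtils.serializeCommand/deserializeCommand helpers (the helper names are an assumption about what CommandTestUtils delegates to):

import java.io.IOException;
import java.util.Collections;

import org.apache.hive.hcatalog.api.repl.Command;
import org.apache.hive.hcatalog.api.repl.ReplicationUtils;
import org.apache.hive.hcatalog.api.repl.commands.DropPartitionCommand;

class CommandRoundTripSketch {
    // Serialize a command to a string and rebuild it; the rebuilt command
    // should carry the same event id and generate the same statements.
    static void roundTrip() throws IOException {
        Command cmd = new DropPartitionCommand("cmd_testdb", "cmd_testtable",
                Collections.singletonMap("b", "test"), true, 789);
        String wire = ReplicationUtils.serializeCommand(cmd);
        Command copy = ReplicationUtils.deserializeCommand(wire);
        assert copy.getEventId() == cmd.getEventId();
        assert copy.get().equals(cmd.get());
    }
}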

Example 3 with HCatTable

Use of org.apache.hive.hcatalog.api.HCatTable in project hive by apache.

From class TestCommands, method testMetadataReplEximCommands:

@Test
public void testMetadataReplEximCommands() throws IOException, CommandNeedRetryException {
    // A replication metadata-only export carries repl.last.id and repl.scope=metadata.
    // Importing that dump changes table metadata only (no data), allows override, and sets repl.last.id.
    int evid = 222;
    String exportLocation = TEST_PATH + File.separator + "testMetadataReplExim";
    Path tempPath = new Path(TEST_PATH, "testMetadataReplEximTmp");
    String tempLocation = tempPath.toUri().getPath();
    String dbName = "exim";
    String tableName = "basicSrc";
    String importedTableName = "basicDst";
    List<HCatFieldSchema> cols = HCatSchemaUtils.getHCatSchema("b:string").getFields();
    client.dropDatabase(dbName, true, HCatClient.DropDBMode.CASCADE);
    client.createDatabase(HCatCreateDBDesc.create(dbName).ifNotExists(false).build());
    HCatTable table = (new HCatTable(dbName, tableName)).cols(cols).fileFormat("textfile");
    client.createTable(HCatCreateTableDesc.create(table).build());
    HCatTable t = client.getTable(dbName, tableName);
    assertNotNull(t);
    String[] data = new String[] { "eleven", "twelve" };
    HcatTestUtils.createTestDataFile(tempLocation, data);
    CommandProcessorResponse ret = driver.run("LOAD DATA LOCAL INPATH '" + tempLocation + "' OVERWRITE INTO TABLE " + dbName + "." + tableName);
    assertEquals(ret.getResponseCode() + ":" + ret.getErrorMessage(), null, ret.getException());
    CommandProcessorResponse selectRet = driver.run("SELECT * from " + dbName + "." + tableName);
    assertEquals(selectRet.getResponseCode() + ":" + selectRet.getErrorMessage(), null, selectRet.getException());
    List<String> values = new ArrayList<String>();
    driver.getResults(values);
    assertEquals(2, values.size());
    assertEquals(data[0], values.get(0));
    assertEquals(data[1], values.get(1));
    ExportCommand exportMdCmd = new ExportCommand(dbName, tableName, null, exportLocation, true, evid);
    LOG.info("About to run :" + exportMdCmd.get().get(0));
    CommandProcessorResponse ret2 = driver.run(exportMdCmd.get().get(0));
    assertEquals(ret2.getResponseCode() + ":" + ret2.getErrorMessage(), null, ret2.getException());
    List<String> exportPaths = exportMdCmd.cleanupLocationsAfterEvent();
    assertEquals(1, exportPaths.size());
    String metadata = getMetadataContents(exportPaths.get(0));
    LOG.info("Export returned the following _metadata contents:");
    LOG.info(metadata);
    assertTrue(metadata + "did not match \"repl.scope\"=\"metadata\"", metadata.matches(".*\"repl.scope\":\"metadata\".*"));
    assertTrue(metadata + "has \"repl.last.id\"", metadata.matches(".*\"repl.last.id\":.*"));
    ImportCommand importMdCmd = new ImportCommand(dbName, importedTableName, null, exportLocation, true, evid);
    LOG.info("About to run :" + importMdCmd.get().get(0));
    CommandProcessorResponse ret3 = driver.run(importMdCmd.get().get(0));
    assertEquals(ret3.getResponseCode() + ":" + ret3.getErrorMessage(), null, ret3.getException());
    CommandProcessorResponse selectRet2 = driver.run("SELECT * from " + dbName + "." + importedTableName);
    assertEquals(selectRet2.getResponseCode() + ":" + selectRet2.getErrorMessage(), null, selectRet2.getException());
    List<String> values2 = new ArrayList<String>();
    driver.getResults(values2);
    assertEquals(0, values2.size());
    HCatTable importedTable = client.getTable(dbName, importedTableName);
    assertNotNull(importedTable);
    assertTrue(importedTable.getTblProps().containsKey("repl.last.id"));
}
Also used:
Path (org.apache.hadoop.fs.Path)
CommandProcessorResponse (org.apache.hadoop.hive.ql.processors.CommandProcessorResponse)
ArrayList (java.util.ArrayList)
HCatTable (org.apache.hive.hcatalog.api.HCatTable)
HCatFieldSchema (org.apache.hive.hcatalog.data.schema.HCatFieldSchema)
Test (org.junit.Test)
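
For orientation, everything a Command hands to driver.run() is a plain HiveQL string. With the metadata-only flag set, the generated export statement should resemble the shape sketched in the comments below; the literal syntax shown is an approximation, not a verbatim quote of ExportCommand's output (the test logs the exact text via LOG.info):

import org.apache.hive.hcatalog.api.repl.commands.ExportCommand;

class ExportShapeSketch {
    static void printExportStatements() {
        // Same arguments as the test: null partition spec, metadataOnly=true, evid=222.
        ExportCommand exportMdCmd = new ExportCommand("exim", "basicSrc", null,
                "/tmp/testMetadataReplExim", true, 222);
        for (String stmt : exportMdCmd.get()) {
            // Expected to resemble (approximate shape, not verbatim):
            //   EXPORT TABLE exim.basicSrc TO '/tmp/testMetadataReplExim'
            //       FOR METADATA REPLICATION('222')
            System.out.println(stmt);
        }
    }
}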

Example 4 with HCatTable

Use of org.apache.hive.hcatalog.api.HCatTable in project hive by apache.

From class TestCommands, method testBasicReplEximCommands:

@Test
public void testBasicReplEximCommands() throws IOException, CommandNeedRetryException {
    // A replication export carries repl.last.id and repl.scope=all.
    // Importing that dump yields a table with repl.last.id set on it (likely 0 at the source).
    int evid = 111;
    String exportLocation = TEST_PATH + File.separator + "testBasicReplExim";
    Path tempPath = new Path(TEST_PATH, "testBasicReplEximTmp");
    String tempLocation = tempPath.toUri().getPath();
    String dbName = "exim";
    String tableName = "basicSrc";
    String importedTableName = "basicDst";
    List<HCatFieldSchema> cols = HCatSchemaUtils.getHCatSchema("b:string").getFields();
    client.dropDatabase(dbName, true, HCatClient.DropDBMode.CASCADE);
    client.createDatabase(HCatCreateDBDesc.create(dbName).ifNotExists(false).build());
    HCatTable table = (new HCatTable(dbName, tableName)).cols(cols).fileFormat("textfile");
    client.createTable(HCatCreateTableDesc.create(table).build());
    HCatTable t = client.getTable(dbName, tableName);
    assertNotNull(t);
    String[] data = new String[] { "eleven", "twelve" };
    HcatTestUtils.createTestDataFile(tempLocation, data);
    CommandProcessorResponse ret = driver.run("LOAD DATA LOCAL INPATH '" + tempLocation + "' OVERWRITE INTO TABLE " + dbName + "." + tableName);
    assertEquals(ret.getResponseCode() + ":" + ret.getErrorMessage(), null, ret.getException());
    CommandProcessorResponse selectRet = driver.run("SELECT * from " + dbName + "." + tableName);
    assertEquals(selectRet.getResponseCode() + ":" + selectRet.getErrorMessage(), null, selectRet.getException());
    List<String> values = new ArrayList<String>();
    driver.getResults(values);
    assertEquals(2, values.size());
    assertEquals(data[0], values.get(0));
    assertEquals(data[1], values.get(1));
    ExportCommand exportCmd = new ExportCommand(dbName, tableName, null, exportLocation, false, evid);
    LOG.info("About to run :" + exportCmd.get().get(0));
    CommandProcessorResponse ret2 = driver.run(exportCmd.get().get(0));
    assertEquals(ret2.getResponseCode() + ":" + ret2.getErrorMessage(), null, ret2.getException());
    List<String> exportPaths = exportCmd.cleanupLocationsAfterEvent();
    assertEquals(1, exportPaths.size());
    String metadata = getMetadataContents(exportPaths.get(0));
    LOG.info("Export returned the following _metadata contents:");
    LOG.info(metadata);
    assertTrue(metadata + "did not match \"repl.scope\"=\"all\"", metadata.matches(".*\"repl.scope\":\"all\".*"));
    assertTrue(metadata + "has \"repl.last.id\"", metadata.matches(".*\"repl.last.id\":.*"));
    ImportCommand importCmd = new ImportCommand(dbName, importedTableName, null, exportLocation, false, evid);
    LOG.info("About to run :" + importCmd.get().get(0));
    CommandProcessorResponse ret3 = driver.run(importCmd.get().get(0));
    assertEquals(ret3.getResponseCode() + ":" + ret3.getErrorMessage(), null, ret3.getException());
    CommandProcessorResponse selectRet2 = driver.run("SELECT * from " + dbName + "." + importedTableName);
    assertEquals(selectRet2.getResponseCode() + ":" + selectRet2.getErrorMessage(), null, selectRet2.getException());
    List<String> values2 = new ArrayList<String>();
    driver.getResults(values2);
    assertEquals(2, values2.size());
    assertEquals(data[0], values2.get(0));
    assertEquals(data[1], values2.get(1));
    HCatTable importedTable = client.getTable(dbName, importedTableName);
    assertNotNull(importedTable);
    assertTrue(importedTable.getTblProps().containsKey("repl.last.id"));
}
Also used:
Path (org.apache.hadoop.fs.Path)
CommandProcessorResponse (org.apache.hadoop.hive.ql.processors.CommandProcessorResponse)
ArrayList (java.util.ArrayList)
HCatTable (org.apache.hive.hcatalog.api.HCatTable)
HCatFieldSchema (org.apache.hive.hcatalog.data.schema.HCatFieldSchema)
Test (org.junit.Test)
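
Both exim tests repeat the same run-a-SELECT-then-collect-rows sequence. A small helper of the following shape (hypothetical; it only factors out the pattern already visible above) would keep the row assertions compact:

// Hypothetical helper for TestCommands; 'driver' is the same test Driver
// instance used throughout, and query failures surface via the response.
private List<String> runSelect(String dbName, String tableName) throws CommandNeedRetryException, IOException {
    CommandProcessorResponse resp = driver.run("SELECT * from " + dbName + "." + tableName);
    assertEquals(resp.getResponseCode() + ":" + resp.getErrorMessage(), null, resp.getException());
    List<String> rows = new ArrayList<String>();
    driver.getResults(rows);
    return rows;
}

The post-import check above would then read as assertEquals(Arrays.asList(data), runSelect(dbName, importedTableName)).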

Example 5 with HCatTable

Use of org.apache.hive.hcatalog.api.HCatTable in project hive by apache.

From class TestCommands, method testDropTableCommand2:

@Test
public void testDropTableCommand2() throws HCatException, CommandNeedRetryException, MetaException {
    // Secondary DropTableCommand test: checks a replicated drop-table's effect on the partitions
    // of a partitioned table when some partitions are older than the drop event.
    // Our goal: create a table with repl.last.id=159 (evid + 2).
    // Create 2 partitions inside it, with repl.last.id=152 and 162 (evid - 5 and evid + 5).
    // Now, process a drop-table command with eventid=157 (evid).
    // It should leave the table and the partition with repl.last.id=162 in place,
    // but drop the partition with repl.last.id=152.
    String dbName = "cmd_testdb";
    String tableName = "cmd_testtable";
    int evid = 157;
    List<HCatFieldSchema> pcols = HCatSchemaUtils.getHCatSchema("b:string").getFields();
    List<HCatFieldSchema> cols = HCatSchemaUtils.getHCatSchema("a:int").getFields();
    Command testReplicatedDropCmd = new DropTableCommand(dbName, tableName, true, evid);
    client.dropDatabase(dbName, true, HCatClient.DropDBMode.CASCADE);
    client.createDatabase(HCatCreateDBDesc.create(dbName).ifNotExists(false).build());
    Map<String, String> tprops = new HashMap<String, String>();
    tprops.put(ReplicationUtils.REPL_STATE_ID, String.valueOf(evid + 2));
    HCatTable table = (new HCatTable(dbName, tableName)).tblProps(tprops).cols(cols).partCols(pcols);
    client.createTable(HCatCreateTableDesc.create(table).build());
    HCatTable tableCreated = client.getTable(dbName, tableName);
    assertNotNull(tableCreated);
    Map<String, String> ptnDesc1 = new HashMap<String, String>();
    ptnDesc1.put("b", "test-older");
    Map<String, String> props1 = new HashMap<String, String>();
    props1.put(ReplicationUtils.REPL_STATE_ID, String.valueOf(evid - 5));
    HCatPartition ptnToAdd1 = (new HCatPartition(tableCreated, ptnDesc1, TestHCatClient.makePartLocation(tableCreated, ptnDesc1))).parameters(props1);
    client.addPartition(HCatAddPartitionDesc.create(ptnToAdd1).build());
    Map<String, String> ptnDesc2 = new HashMap<String, String>();
    ptnDesc2.put("b", "test-newer");
    Map<String, String> props2 = new HashMap<String, String>();
    props2.put(ReplicationUtils.REPL_STATE_ID, String.valueOf(evid + 5));
    HCatPartition ptnToAdd2 = (new HCatPartition(tableCreated, ptnDesc2, TestHCatClient.makePartLocation(tableCreated, ptnDesc2))).parameters(props2);
    client.addPartition(HCatAddPartitionDesc.create(ptnToAdd2).build());
    HCatPartition p1 = client.getPartition(dbName, tableName, ptnDesc1);
    assertNotNull(p1);
    HCatPartition p2 = client.getPartition(dbName, tableName, ptnDesc2);
    assertNotNull(p2);
    LOG.info("About to run :" + testReplicatedDropCmd.get().get(0));
    driver.run(testReplicatedDropCmd.get().get(0));
    HCatTable t_stillExists = client.getTable(dbName, tableName);
    assertNotNull(t_stillExists);
    HCatPartition p2_stillExists = client.getPartition(dbName, tableName, ptnDesc2);
    Exception onfe = null;
    try {
        HCatPartition p1_del = client.getPartition(dbName, tableName, ptnDesc1);
    } catch (Exception e) {
        onfe = e;
    }
    assertNotNull(onfe);
    assertTrue(onfe instanceof ObjectNotFoundException);
}
Also used:
Command (org.apache.hive.hcatalog.api.repl.Command)
HashMap (java.util.HashMap)
ObjectNotFoundException (org.apache.hive.hcatalog.api.ObjectNotFoundException)
HCatTable (org.apache.hive.hcatalog.api.HCatTable)
HCatPartition (org.apache.hive.hcatalog.api.HCatPartition)
MetaException (org.apache.hadoop.hive.metastore.api.MetaException)
HCatException (org.apache.hive.hcatalog.common.HCatException)
CommandNeedRetryException (org.apache.hadoop.hive.ql.CommandNeedRetryException)
IOException (java.io.IOException)
HCatFieldSchema (org.apache.hive.hcatalog.data.schema.HCatFieldSchema)
Test (org.junit.Test)
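
The outcome asserted here is easiest to see as the partition-level filter that a replication-aware DROP TABLE must apply. The sketch below captures the semantics only and is not Hive's implementation; HCatClient.getPartitions is a real API, while HCatPartition.getParameters() is assumed as the getter matching the parameters(...) setter used above.

import java.util.ArrayList;
import java.util.List;

import org.apache.hive.hcatalog.api.HCatClient;
import org.apache.hive.hcatalog.api.HCatPartition;
import org.apache.hive.hcatalog.api.repl.ReplicationUtils;
import org.apache.hive.hcatalog.common.HCatException;

class ReplicatedTableDropSketch {
    // With evid=157, table repl.last.id=159, and partitions at 152 and 162:
    // the 152 partition predates the event and is dropped; the 162 partition
    // is newer and survives, which in turn keeps the table alive.
    static List<HCatPartition> partitionsDroppedBy(HCatClient client, String db, String table, long eventId) throws HCatException {
        List<HCatPartition> doomed = new ArrayList<HCatPartition>();
        for (HCatPartition p : client.getPartitions(db, table)) {
            String lastId = p.getParameters().get(ReplicationUtils.REPL_STATE_ID);
            if (lastId == null || Long.parseLong(lastId) < eventId) {
                doomed.add(p);
            }
        }
        return doomed;
    }
}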

Aggregations

HCatTable (org.apache.hive.hcatalog.api.HCatTable): 5 uses
HCatFieldSchema (org.apache.hive.hcatalog.data.schema.HCatFieldSchema): 5 uses
Test (org.junit.Test): 5 uses
IOException (java.io.IOException): 3 uses
HashMap (java.util.HashMap): 3 uses
MetaException (org.apache.hadoop.hive.metastore.api.MetaException): 3 uses
CommandNeedRetryException (org.apache.hadoop.hive.ql.CommandNeedRetryException): 3 uses
ObjectNotFoundException (org.apache.hive.hcatalog.api.ObjectNotFoundException): 3 uses
Command (org.apache.hive.hcatalog.api.repl.Command): 3 uses
HCatException (org.apache.hive.hcatalog.common.HCatException): 3 uses
ArrayList (java.util.ArrayList): 2 uses
Path (org.apache.hadoop.fs.Path): 2 uses
CommandProcessorResponse (org.apache.hadoop.hive.ql.processors.CommandProcessorResponse): 2 uses
HCatPartition (org.apache.hive.hcatalog.api.HCatPartition): 2 uses