Use of org.apache.thrift.TException in project hive by apache: the class HCatClientHMSImpl, method dropPartitions.
@Override
public void dropPartitions(String dbName, String tableName, Map<String, String> partitionSpec, boolean ifExists, boolean deleteData) throws HCatException {
  LOG.info("HCatClient dropPartitions(db=" + dbName + ",table=" + tableName + ", partitionSpec: [" + partitionSpec + "]).");
  try {
    dbName = checkDB(dbName);
    Table table = hmsClient.getTable(dbName, tableName);
    if (hiveConfig.getBoolVar(HiveConf.ConfVars.METASTORE_CLIENT_DROP_PARTITIONS_WITH_EXPRESSIONS)) {
      try {
        dropPartitionsUsingExpressions(table, partitionSpec, ifExists, deleteData);
      } catch (SemanticException parseFailure) {
        LOG.warn("Could not push down partition-specification to back-end, for dropPartitions(). Resorting to iteration.", parseFailure);
        dropPartitionsIteratively(dbName, tableName, partitionSpec, ifExists, deleteData);
      }
    } else {
      // Not using expressions.
      dropPartitionsIteratively(dbName, tableName, partitionSpec, ifExists, deleteData);
    }
  } catch (NoSuchObjectException e) {
    throw new ObjectNotFoundException("NoSuchObjectException while dropping partition. " + "Either db(" + dbName + ") or table(" + tableName + ") missing.", e);
  } catch (MetaException e) {
    throw new HCatException("MetaException while dropping partition.", e);
  } catch (TException e) {
    throw new ConnectionFailureException("TException while dropping partition.", e);
  }
}
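A minimal caller-side sketch of this method, assuming an already-created HCatClient named client; the database, table, and partition values below are illustrative placeholders, not taken from the source:

// Hypothetical usage: drop every partition of mydb.mytable whose
// partition column "dt" equals "2016-01-01". ifExists=true suppresses
// the failure when nothing matches; deleteData=true removes the files.
Map<String, String> partitionSpec = new HashMap<String, String>();
partitionSpec.put("dt", "2016-01-01");
client.dropPartitions("mydb", "mytable", partitionSpec, true, true);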
Use of org.apache.thrift.TException in project hive by apache: the class HCatClientHMSImpl, method listPartitionsByFilter.
@Override
public List<HCatPartition> listPartitionsByFilter(String dbName, String tblName, String filter) throws HCatException {
  List<HCatPartition> hcatPtns = new ArrayList<HCatPartition>();
  try {
    HCatTable table = getTable(dbName, tblName);
    List<Partition> hivePtns = hmsClient.listPartitionsByFilter(table.getDbName(), table.getTableName(), filter, (short) -1);
    for (Partition ptn : hivePtns) {
      hcatPtns.add(new HCatPartition(table, ptn));
    }
  } catch (MetaException e) {
    throw new HCatException("MetaException while fetching partitions.", e);
  } catch (NoSuchObjectException e) {
    throw new ObjectNotFoundException("NoSuchObjectException while fetching partitions.", e);
  } catch (TException e) {
    throw new ConnectionFailureException("TException while fetching partitions.", e);
  }
  return hcatPtns;
}
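A short usage sketch, again assuming a connected HCatClient named client; the filter string is written against partition columns in the metastore's partition-filter syntax, and all names here are illustrative:

// Hypothetical usage: fetch only the partitions whose "dt" partition
// column is on or after 2016-01-01, instead of listing everything.
List<HCatPartition> matched = client.listPartitionsByFilter("mydb", "mytable", "dt >= '2016-01-01'");
for (HCatPartition partition : matched) {
  LOG.info("Matched partition values: " + partition.getValues());
}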
Use of org.apache.thrift.TException in project hive by apache: the class HCatClientHMSImpl, method getPartitions.
@Override
public List<HCatPartition> getPartitions(String dbName, String tblName) throws HCatException {
  List<HCatPartition> hcatPtns = new ArrayList<HCatPartition>();
  try {
    HCatTable hcatTable = getTable(dbName, tblName);
    List<Partition> hivePtns = hmsClient.listPartitions(checkDB(dbName), tblName, (short) -1);
    for (Partition ptn : hivePtns) {
      hcatPtns.add(new HCatPartition(hcatTable, ptn));
    }
  } catch (NoSuchObjectException e) {
    throw new ObjectNotFoundException("NoSuchObjectException while retrieving partition.", e);
  } catch (MetaException e) {
    throw new HCatException("MetaException while retrieving partition.", e);
  } catch (TException e) {
    throw new ConnectionFailureException("TException while retrieving partition.", e);
  }
  return hcatPtns;
}
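A usage sketch under the same assumptions (client and the table names are placeholders). Since getPartitions takes no filter and the (short) -1 limit means "all", it fetches every partition, which can be expensive on heavily partitioned tables:

// Hypothetical usage: count all partitions of mydb.mytable. Prefer
// listPartitionsByFilter above when only a subset is needed.
List<HCatPartition> allPartitions = client.getPartitions("mydb", "mytable");
LOG.info("mydb.mytable has " + allPartitions.size() + " partitions.");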
Use of org.apache.thrift.TException in project hive by apache: the class TestReplicationScenarios, method testDropsWithCM.
@Test
public void testDropsWithCM() throws IOException {
  String testName = "drops_with_cm";
  LOG.info("Testing " + testName);
  String dbName = testName + "_" + tid;
  run("CREATE DATABASE " + dbName);
  run("CREATE TABLE " + dbName + ".unptned(a string) STORED AS TEXTFILE");
  run("CREATE TABLE " + dbName + ".ptned(a string) partitioned by (b string) STORED AS TEXTFILE");
  run("CREATE TABLE " + dbName + ".ptned2(a string) partitioned by (b string) STORED AS TEXTFILE");
  String[] unptn_data = new String[] { "eleven", "twelve" };
  String[] ptn_data_1 = new String[] { "thirteen", "fourteen", "fifteen" };
  String[] ptn_data_2 = new String[] { "fifteen", "sixteen", "seventeen" };
  String[] empty = new String[] {};
  String unptn_locn = new Path(TEST_PATH, testName + "_unptn").toUri().getPath();
  String ptn_locn_1 = new Path(TEST_PATH, testName + "_ptn1").toUri().getPath();
  String ptn_locn_2 = new Path(TEST_PATH, testName + "_ptn2").toUri().getPath();
  createTestDataFile(unptn_locn, unptn_data);
  createTestDataFile(ptn_locn_1, ptn_data_1);
  createTestDataFile(ptn_locn_2, ptn_data_2);
  run("LOAD DATA LOCAL INPATH '" + unptn_locn + "' OVERWRITE INTO TABLE " + dbName + ".unptned");
  run("SELECT * from " + dbName + ".unptned");
  verifyResults(unptn_data);
  run("LOAD DATA LOCAL INPATH '" + ptn_locn_1 + "' OVERWRITE INTO TABLE " + dbName + ".ptned PARTITION(b='1')");
  run("SELECT a from " + dbName + ".ptned WHERE b='1'");
  verifyResults(ptn_data_1);
  run("LOAD DATA LOCAL INPATH '" + ptn_locn_2 + "' OVERWRITE INTO TABLE " + dbName + ".ptned PARTITION(b='2')");
  run("SELECT a from " + dbName + ".ptned WHERE b='2'");
  verifyResults(ptn_data_2);
  run("LOAD DATA LOCAL INPATH '" + ptn_locn_1 + "' OVERWRITE INTO TABLE " + dbName + ".ptned2 PARTITION(b='1')");
  run("SELECT a from " + dbName + ".ptned2 WHERE b='1'");
  verifyResults(ptn_data_1);
  run("LOAD DATA LOCAL INPATH '" + ptn_locn_2 + "' OVERWRITE INTO TABLE " + dbName + ".ptned2 PARTITION(b='2')");
  run("SELECT a from " + dbName + ".ptned2 WHERE b='2'");
  verifyResults(ptn_data_2);
  advanceDumpDir();
  run("REPL DUMP " + dbName);
  String replDumpLocn = getResult(0, 0);
  String replDumpId = getResult(0, 1, true);
  run("EXPLAIN REPL LOAD " + dbName + "_dupe FROM '" + replDumpLocn + "'");
  printOutput();
  run("REPL LOAD " + dbName + "_dupe FROM '" + replDumpLocn + "'");
  run("REPL STATUS " + dbName + "_dupe");
  verifyResults(new String[] { replDumpId });
  run("SELECT * from " + dbName + "_dupe.unptned");
  verifyResults(unptn_data);
  run("SELECT a from " + dbName + "_dupe.ptned WHERE b='1'");
  verifyResults(ptn_data_1);
  run("SELECT a from " + dbName + "_dupe.ptned WHERE b='2'");
  verifyResults(ptn_data_2);
  run("SELECT a from " + dbName + "_dupe.ptned2 WHERE b='1'");
  verifyResults(ptn_data_1);
  run("SELECT a from " + dbName + "_dupe.ptned2 WHERE b='2'");
  verifyResults(ptn_data_2);
  run("CREATE TABLE " + dbName + ".unptned_copy" + " AS SELECT a FROM " + dbName + ".unptned");
  run("CREATE TABLE " + dbName + ".ptned_copy" + " LIKE " + dbName + ".ptned");
  run("INSERT INTO TABLE " + dbName + ".ptned_copy" + " PARTITION(b='1') SELECT a FROM " + dbName + ".ptned WHERE b='1'");
  run("SELECT a from " + dbName + ".unptned_copy");
  verifyResults(unptn_data);
  run("SELECT a from " + dbName + ".ptned_copy");
  verifyResults(ptn_data_1);
  run("DROP TABLE " + dbName + ".unptned");
  run("ALTER TABLE " + dbName + ".ptned DROP PARTITION (b='2')");
  run("DROP TABLE " + dbName + ".ptned2");
  run("SELECT a from " + dbName + ".ptned WHERE b=2");
  verifyResults(empty);
  run("SELECT a from " + dbName + ".ptned");
  verifyResults(ptn_data_1);
  advanceDumpDir();
  run("REPL DUMP " + dbName + " FROM " + replDumpId);
  String postDropReplDumpLocn = getResult(0, 0);
  String postDropReplDumpId = getResult(0, 1, true);
  LOG.info("Dumped to {} with id {}->{}", postDropReplDumpLocn, replDumpId, postDropReplDumpId);
  // Drop table after dump
  run("DROP TABLE " + dbName + ".unptned_copy");
  // Drop partition after dump
  run("ALTER TABLE " + dbName + ".ptned_copy DROP PARTITION(b='1')");
  run("EXPLAIN REPL LOAD " + dbName + "_dupe FROM '" + postDropReplDumpLocn + "'");
  printOutput();
  run("REPL LOAD " + dbName + "_dupe FROM '" + postDropReplDumpLocn + "'");
  Exception e = null;
  try {
    Table tbl = metaStoreClient.getTable(dbName + "_dupe", "unptned");
    assertNull(tbl);
  } catch (TException te) {
    e = te;
  }
  assertNotNull(e);
  assertEquals(NoSuchObjectException.class, e.getClass());
  run("SELECT a from " + dbName + "_dupe.ptned WHERE b=2");
  verifyResults(empty);
  run("SELECT a from " + dbName + "_dupe.ptned");
  verifyResults(ptn_data_1);
  Exception e2 = null;
  try {
    Table tbl = metaStoreClient.getTable(dbName + "_dupe", "ptned2");
    assertNull(tbl);
  } catch (TException te) {
    e2 = te;
  }
  assertNotNull(e2);
  assertEquals(NoSuchObjectException.class, e2.getClass());
run("SELECT a from " + dbName + "_dupe.unptned_copy");
verifyResults(unptn_data);
run("SELECT a from " + dbName + "_dupe.ptned_copy");
verifyResults(ptn_data_1);
}
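The try/catch-and-assert sequence above appears twice in this test and twice more in testDrops below; it could be factored into a helper along these lines. This is a sketch, not code from the source; it assumes metaStoreClient is the test class's metastore client field and that JUnit's fail is statically imported:

// Hypothetical helper: assert that a table no longer exists by
// expecting getTable to throw NoSuchObjectException. Any other
// TException propagates and fails the test.
private void assertTableMissing(String dbName, String tableName) throws TException {
  try {
    metaStoreClient.getTable(dbName, tableName);
    fail("Expected NoSuchObjectException for " + dbName + "." + tableName);
  } catch (NoSuchObjectException expected) {
    // The table is gone, as the replicated DROP requires.
  }
}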
Use of org.apache.thrift.TException in project hive by apache: the class TestReplicationScenarios, method testDrops.
@Test
public void testDrops() throws IOException {
  String testName = "drops";
  LOG.info("Testing " + testName);
  String dbName = testName + "_" + tid;
  run("CREATE DATABASE " + dbName);
  run("CREATE TABLE " + dbName + ".unptned(a string) STORED AS TEXTFILE");
  run("CREATE TABLE " + dbName + ".ptned(a string) partitioned by (b string) STORED AS TEXTFILE");
  run("CREATE TABLE " + dbName + ".ptned2(a string) partitioned by (b string) STORED AS TEXTFILE");
  run("CREATE TABLE " + dbName + ".ptned3(a string) partitioned by (b int) STORED AS TEXTFILE");
  String[] unptn_data = new String[] { "eleven", "twelve" };
  String[] ptn_data_1 = new String[] { "thirteen", "fourteen", "fifteen" };
  String[] ptn_data_2 = new String[] { "fifteen", "sixteen", "seventeen" };
  String[] empty = new String[] {};
  String unptn_locn = new Path(TEST_PATH, testName + "_unptn").toUri().getPath();
  String ptn_locn_1 = new Path(TEST_PATH, testName + "_ptn1").toUri().getPath();
  String ptn_locn_2 = new Path(TEST_PATH, testName + "_ptn2").toUri().getPath();
  createTestDataFile(unptn_locn, unptn_data);
  createTestDataFile(ptn_locn_1, ptn_data_1);
  createTestDataFile(ptn_locn_2, ptn_data_2);
  run("LOAD DATA LOCAL INPATH '" + unptn_locn + "' OVERWRITE INTO TABLE " + dbName + ".unptned");
  verifySetup("SELECT * from " + dbName + ".unptned", unptn_data);
  run("LOAD DATA LOCAL INPATH '" + ptn_locn_1 + "' OVERWRITE INTO TABLE " + dbName + ".ptned PARTITION(b='1')");
  verifySetup("SELECT a from " + dbName + ".ptned WHERE b='1'", ptn_data_1);
  run("LOAD DATA LOCAL INPATH '" + ptn_locn_2 + "' OVERWRITE INTO TABLE " + dbName + ".ptned PARTITION(b='2')");
  verifySetup("SELECT a from " + dbName + ".ptned WHERE b='2'", ptn_data_2);
  run("LOAD DATA LOCAL INPATH '" + ptn_locn_1 + "' OVERWRITE INTO TABLE " + dbName + ".ptned2 PARTITION(b='1')");
  verifySetup("SELECT a from " + dbName + ".ptned2 WHERE b='1'", ptn_data_1);
  run("LOAD DATA LOCAL INPATH '" + ptn_locn_2 + "' OVERWRITE INTO TABLE " + dbName + ".ptned2 PARTITION(b='2')");
  verifySetup("SELECT a from " + dbName + ".ptned2 WHERE b='2'", ptn_data_2);
  run("LOAD DATA LOCAL INPATH '" + ptn_locn_1 + "' OVERWRITE INTO TABLE " + dbName + ".ptned3 PARTITION(b=1)");
verifySetup("SELECT a from " + dbName + ".ptned2 WHERE b=1", ptn_data_1);
run("LOAD DATA LOCAL INPATH '" + ptn_locn_2 + "' OVERWRITE INTO TABLE " + dbName + ".ptned3 PARTITION(b=2)");
verifySetup("SELECT a from " + dbName + ".ptned2 WHERE b=2", ptn_data_2);
  // At this point, we've set up all the tables and ptns we're going to test drops across
  // Replicate it first, and then we'll drop it on the source.
  advanceDumpDir();
  run("REPL DUMP " + dbName);
  String replDumpLocn = getResult(0, 0);
  String replDumpId = getResult(0, 1, true);
  run("EXPLAIN REPL LOAD " + dbName + "_dupe FROM '" + replDumpLocn + "'");
  printOutput();
  run("REPL LOAD " + dbName + "_dupe FROM '" + replDumpLocn + "'");
  verifySetup("REPL STATUS " + dbName + "_dupe", new String[] { replDumpId });
  verifySetup("SELECT * from " + dbName + "_dupe.unptned", unptn_data);
  verifySetup("SELECT a from " + dbName + "_dupe.ptned WHERE b='1'", ptn_data_1);
  verifySetup("SELECT a from " + dbName + "_dupe.ptned WHERE b='2'", ptn_data_2);
  verifySetup("SELECT a from " + dbName + "_dupe.ptned2 WHERE b='1'", ptn_data_1);
  verifySetup("SELECT a from " + dbName + "_dupe.ptned2 WHERE b='2'", ptn_data_2);
  verifySetup("SELECT a from " + dbName + "_dupe.ptned3 WHERE b=1", ptn_data_1);
  verifySetup("SELECT a from " + dbName + "_dupe.ptned3 WHERE b=2", ptn_data_2);
  // All tables good on destination, drop on source.
  run("DROP TABLE " + dbName + ".unptned");
  run("ALTER TABLE " + dbName + ".ptned DROP PARTITION (b='2')");
  run("DROP TABLE " + dbName + ".ptned2");
  run("ALTER TABLE " + dbName + ".ptned3 DROP PARTITION (b=1)");
  verifySetup("SELECT a from " + dbName + ".ptned WHERE b='2'", empty);
  verifySetup("SELECT a from " + dbName + ".ptned", ptn_data_1);
  verifySetup("SELECT a from " + dbName + ".ptned3 WHERE b=1", empty);
  verifySetup("SELECT a from " + dbName + ".ptned3", ptn_data_2);
  // replicate the incremental drops
  advanceDumpDir();
  run("REPL DUMP " + dbName + " FROM " + replDumpId);
  String postDropReplDumpLocn = getResult(0, 0);
  String postDropReplDumpId = getResult(0, 1, true);
  LOG.info("Dumped to {} with id {}->{}", postDropReplDumpLocn, replDumpId, postDropReplDumpId);
  run("EXPLAIN REPL LOAD " + dbName + "_dupe FROM '" + postDropReplDumpLocn + "'");
  printOutput();
  run("REPL LOAD " + dbName + "_dupe FROM '" + postDropReplDumpLocn + "'");
  // verify that drops were replicated. This can either be from tables or ptns
  // not existing, and thus, throwing a NoSuchObjectException, or returning nulls
  // or select * returning empty, depending on what we're testing.
  Exception e = null;
  try {
    Table tbl = metaStoreClient.getTable(dbName + "_dupe", "unptned");
    assertNull(tbl);
  } catch (TException te) {
    e = te;
  }
  assertNotNull(e);
  assertEquals(NoSuchObjectException.class, e.getClass());
  verifyRun("SELECT a from " + dbName + "_dupe.ptned WHERE b='2'", empty);
  verifyRun("SELECT a from " + dbName + "_dupe.ptned", ptn_data_1);
  verifyRun("SELECT a from " + dbName + "_dupe.ptned3 WHERE b=1", empty);
  verifyRun("SELECT a from " + dbName + "_dupe.ptned3", ptn_data_2);
  Exception e2 = null;
  try {
    Table tbl = metaStoreClient.getTable(dbName + "_dupe", "ptned2");
    assertNull(tbl);
  } catch (TException te) {
    e2 = te;
  }
  assertNotNull(e2);
  assertEquals(NoSuchObjectException.class, e2.getClass());
}
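As a complement to the SELECT-based checks, the replicated partition drop could also be verified directly against the metastore. A sketch, not from the source, assuming the test's metaStoreClient field and dbName variable are in scope:

// Hypothetical check: after the incremental REPL LOAD, ptned3 on the
// destination should retain only its b=2 partition.
List<Partition> remaining = metaStoreClient.listPartitions(dbName + "_dupe", "ptned3", (short) -1);
assertEquals(1, remaining.size());
assertEquals("2", remaining.get(0).getValues().get(0));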