use of org.apache.hadoop.hive.metastore.api.ShowCompactResponseElement in project hive by apache.
the class TestWorker method minorTableWithBase.
@Test
public void minorTableWithBase() throws Exception {
  LOG.debug("Starting minorTableWithBase");
  Table t = newTable("default", "mtwb", false);
  addBaseFile(t, null, 20L, 20);
  addDeltaFile(t, null, 21L, 22L, 2);
  addDeltaFile(t, null, 23L, 24L, 2);
  burnThroughTransactions("default", "mtwb", 25);
  CompactionRequest rqst = new CompactionRequest("default", "mtwb", CompactionType.MINOR);
  txnHandler.compact(rqst);
  // The worker adds a compacted delta and a delete_delta directory
  startWorker();
  ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
  List<ShowCompactResponseElement> compacts = rsp.getCompacts();
  Assert.assertEquals(1, compacts.size());
  Assert.assertEquals("ready for cleaning", compacts.get(0).getState());
  // There should now be 5 directories in the location
  FileSystem fs = FileSystem.get(conf);
  FileStatus[] stat = fs.listStatus(new Path(t.getSd().getLocation()));
  Assert.assertEquals(5, stat.length);
  // Find the new delta directories and make sure they have the right contents
  boolean sawNewDelta = false;
  for (int i = 0; i < stat.length; i++) {
    if (stat[i].getPath().getName().equals(makeDeltaDirNameCompacted(21, 24) + "_v0000026")) {
      sawNewDelta = true;
      FileStatus[] buckets = fs.listStatus(stat[i].getPath(), AcidUtils.hiddenFileFilter);
      Assert.assertEquals(2, buckets.length);
      Assert.assertTrue(buckets[0].getPath().getName().matches("bucket_0000[01]"));
      Assert.assertTrue(buckets[1].getPath().getName().matches("bucket_0000[01]"));
      Assert.assertEquals(104L, buckets[0].getLen());
      Assert.assertEquals(104L, buckets[1].getLen());
    }
    if (stat[i].getPath().getName().equals(makeDeleteDeltaDirNameCompacted(21, 24) + "_v0000026")) {
      sawNewDelta = true;
      FileStatus[] buckets = fs.listStatus(stat[i].getPath(), AcidUtils.hiddenFileFilter);
      Assert.assertEquals(2, buckets.length);
      Assert.assertTrue(buckets[0].getPath().getName().matches("bucket_0000[01]"));
      Assert.assertTrue(buckets[1].getPath().getName().matches("bucket_0000[01]"));
      Assert.assertEquals(104L, buckets[0].getLen());
      Assert.assertEquals(104L, buckets[1].getLen());
    } else {
      LOG.debug("This is not the delta file you are looking for " + stat[i].getPath().getName());
    }
  }
  Assert.assertTrue(toString(stat), sawNewDelta);
}
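All of these tests read compaction state back the same way: call txnHandler.showCompact(new ShowCompactRequest()) and inspect the returned ShowCompactResponseElement entries. The sketch below shows only that read side, using the Thrift-generated getters on ShowCompactResponseElement (getDbname, getTablename, getPartitionname, getState). It is a minimal illustration, not part of TestWorker, and the class and method names are made up for this example.

import java.util.List;
import org.apache.hadoop.hive.metastore.api.ShowCompactResponse;
import org.apache.hadoop.hive.metastore.api.ShowCompactResponseElement;

class CompactionStatePrinter {
  // Print one line per compaction the metastore reports, e.g. "default.mtwb -> ready for cleaning".
  static void printCompactions(ShowCompactResponse rsp) {
    List<ShowCompactResponseElement> compacts = rsp.getCompacts();
    for (ShowCompactResponseElement e : compacts) {
      String target = e.getDbname() + "." + e.getTablename();
      if (e.isSetPartitionname()) {
        // Partition name is optional; it is set only for partition-level compactions such as ds=today.
        target += "/" + e.getPartitionname();
      }
      System.out.println(target + " -> " + e.getState());
    }
  }
}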
use of org.apache.hadoop.hive.metastore.api.ShowCompactResponseElement in project hive by apache.
the class TestWorker method majorPartitionWithBase.
@Test
public void majorPartitionWithBase() throws Exception {
  LOG.debug("Starting majorPartitionWithBase");
  Table t = newTable("default", "mapwb", true);
  Partition p = newPartition(t, "today");
  addBaseFile(t, p, 20L, 20);
  addDeltaFile(t, p, 21L, 22L, 2);
  addDeltaFile(t, p, 23L, 24L, 2);
  burnThroughTransactions("default", "mapwb", 25);
  CompactionRequest rqst = new CompactionRequest("default", "mapwb", CompactionType.MAJOR);
  rqst.setPartitionname("ds=today");
  txnHandler.compact(rqst);
  startWorker();
  ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
  List<ShowCompactResponseElement> compacts = rsp.getCompacts();
  Assert.assertEquals(1, compacts.size());
  Assert.assertEquals("ready for cleaning", compacts.get(0).getState());
  // There should now be four directories in the partition location.
  FileSystem fs = FileSystem.get(conf);
  FileStatus[] stat = fs.listStatus(new Path(p.getSd().getLocation()));
  Assert.assertEquals(4, stat.length);
  // Find the new base directory and make sure it has the right contents
  boolean sawNewBase = false;
  for (int i = 0; i < stat.length; i++) {
    if (stat[i].getPath().getName().equals("base_0000024_v0000026")) {
      sawNewBase = true;
      FileStatus[] buckets = fs.listStatus(stat[i].getPath(), FileUtils.HIDDEN_FILES_PATH_FILTER);
      Assert.assertEquals(2, buckets.length);
      Assert.assertTrue(buckets[0].getPath().getName().matches("bucket_0000[01]"));
      Assert.assertTrue(buckets[1].getPath().getName().matches("bucket_0000[01]"));
      Assert.assertEquals(624L, buckets[0].getLen());
      Assert.assertEquals(624L, buckets[1].getLen());
    } else {
      LOG.debug("This is not the file you are looking for " + stat[i].getPath().getName());
    }
  }
  Assert.assertTrue(toString(stat), sawNewBase);
}
use of org.apache.hadoop.hive.metastore.api.ShowCompactResponseElement in project hive by apache.
the class TestWorker method majorWithOpenInMiddle.
@Test
public void majorWithOpenInMiddle() throws Exception {
  LOG.debug("Starting majorWithOpenInMiddle");
  Table t = newTable("default", "mtwb", false);
  addBaseFile(t, null, 20L, 20);
  addDeltaFile(t, null, 21L, 22L, 2);
  addDeltaFile(t, null, 23L, 25L, 3);
  addLengthFile(t, null, 23L, 25L, 3);
  addDeltaFile(t, null, 26L, 27L, 2);
  // Burn through 27 transactions, leaving txn 23 open
  burnThroughTransactions("default", "mtwb", 27, new HashSet<Long>(Arrays.asList(23L)), null);
  CompactionRequest rqst = new CompactionRequest("default", "mtwb", CompactionType.MAJOR);
  txnHandler.compact(rqst);
  startWorker();
  ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
  List<ShowCompactResponseElement> compacts = rsp.getCompacts();
  Assert.assertEquals(1, compacts.size());
  Assert.assertEquals("ready for cleaning", compacts.get(0).getState());
  // There should now be 5 directories in the location
  FileSystem fs = FileSystem.get(conf);
  FileStatus[] stat = fs.listStatus(new Path(t.getSd().getLocation()));
  Assert.assertEquals(5, stat.length);
  // Because txn 23 is still open, the new base only covers write IDs up to 22
  Arrays.sort(stat);
  Assert.assertEquals("base_0000022_v0000028", stat[0].getPath().getName());
  Assert.assertEquals("base_20", stat[1].getPath().getName());
  Assert.assertEquals(makeDeltaDirName(21, 22), stat[2].getPath().getName());
  Assert.assertEquals(makeDeltaDirName(23, 25), stat[3].getPath().getName());
  Assert.assertEquals(makeDeltaDirName(26, 27), stat[4].getPath().getName());
}
use of org.apache.hadoop.hive.metastore.api.ShowCompactResponseElement in project hive by apache.
the class TestWorker method minorTableLegacy.
@Test
public void minorTableLegacy() throws Exception {
  LOG.debug("Starting minorTableLegacy");
  Table t = newTable("default", "mtl", false);
  addLegacyFile(t, null, 20);
  addDeltaFile(t, null, 21L, 22L, 2);
  addDeltaFile(t, null, 23L, 24L, 2);
  burnThroughTransactions("default", "mtl", 25);
  CompactionRequest rqst = new CompactionRequest("default", "mtl", CompactionType.MINOR);
  txnHandler.compact(rqst);
  startWorker();
  ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
  List<ShowCompactResponseElement> compacts = rsp.getCompacts();
  Assert.assertEquals(1, compacts.size());
  Assert.assertEquals("ready for cleaning", compacts.get(0).getState());
  // There should now be 5 directories in the location
  FileSystem fs = FileSystem.get(conf);
  FileStatus[] stat = fs.listStatus(new Path(t.getSd().getLocation()));
  // Find the new delta directory and make sure it has the right contents
  boolean sawNewDelta = false;
  for (int i = 0; i < stat.length; i++) {
    if (stat[i].getPath().getName().equals(makeDeltaDirNameCompacted(21, 24) + "_v0000026")) {
      sawNewDelta = true;
      FileStatus[] buckets = fs.listStatus(stat[i].getPath(), AcidUtils.hiddenFileFilter);
      Assert.assertEquals(2, buckets.length);
      Assert.assertTrue(buckets[0].getPath().getName().matches("bucket_0000[01]"));
      Assert.assertTrue(buckets[1].getPath().getName().matches("bucket_0000[01]"));
    } else {
      LOG.debug("This is not the file you are looking for " + stat[i].getPath().getName());
    }
  }
  Assert.assertTrue(toString(stat), sawNewDelta);
}
use of org.apache.hadoop.hive.metastore.api.ShowCompactResponseElement in project hive by apache.
the class TestWorker method minorWithAborted.
@Test
public void minorWithAborted() throws Exception {
  LOG.debug("Starting minorWithAborted");
  Table t = newTable("default", "mtwb", false);
  addBaseFile(t, null, 20L, 20);
  addDeltaFile(t, null, 21L, 22L, 2);
  addDeltaFile(t, null, 23L, 25L, 3);
  addLengthFile(t, null, 23L, 25L, 3);
  addDeltaFile(t, null, 26L, 27L, 2);
  // Burn through 27 transactions, aborting txns 24 and 25
  burnThroughTransactions("default", "mtwb", 27, null, new HashSet<Long>(Arrays.asList(24L, 25L)));
  CompactionRequest rqst = new CompactionRequest("default", "mtwb", CompactionType.MINOR);
  txnHandler.compact(rqst);
  startWorker();
  ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
  List<ShowCompactResponseElement> compacts = rsp.getCompacts();
  Assert.assertEquals(1, compacts.size());
  Assert.assertEquals("ready for cleaning", compacts.get(0).getState());
  // There should now be 6 directories in the location
  FileSystem fs = FileSystem.get(conf);
  FileStatus[] stat = fs.listStatus(new Path(t.getSd().getLocation()));
  Assert.assertEquals(6, stat.length);
  // The new compacted delta and delete_delta cover write IDs 21-27
  Arrays.sort(stat);
  Assert.assertEquals("base_20", stat[0].getPath().getName());
  Assert.assertEquals(makeDeleteDeltaDirNameCompacted(21, 27) + "_v0000028", stat[1].getPath().getName());
  Assert.assertEquals(makeDeltaDirName(21, 22), stat[2].getPath().getName());
  Assert.assertEquals(makeDeltaDirNameCompacted(21, 27) + "_v0000028", stat[3].getPath().getName());
  Assert.assertEquals(makeDeltaDirName(23, 25), stat[4].getPath().getName());
  Assert.assertEquals(makeDeltaDirName(26, 27), stat[5].getPath().getName());
}
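The directory names asserted throughout these tests (base_0000024_v0000026, base_0000022_v0000028, bucket_00000/bucket_00001, and the values produced by makeDeltaDirName, makeDeltaDirNameCompacted and makeDeleteDeltaDirNameCompacted) follow a fixed zero-padded naming scheme. The sketch below reproduces that scheme with plain String.format so the asserted strings are easier to read; the 7-digit padding is taken from the base names shown above, and the delta forms are assumed to follow the same convention rather than copied from the Hive helpers.

class AcidDirNameSketch {
  // Base directory produced by a major compaction: highest included write ID plus the
  // compactor's visibility txn id, e.g. base(24, 26) -> "base_0000024_v0000026".
  static String base(long maxWriteId, long visibilityTxnId) {
    return String.format("base_%07d_v%07d", maxWriteId, visibilityTxnId);
  }

  // Compacted delta produced by a minor compaction; assumed to match
  // makeDeltaDirNameCompacted(min, max) + "_v..." as used in the tests above.
  static String compactedDelta(long minWriteId, long maxWriteId, long visibilityTxnId) {
    return String.format("delta_%07d_%07d_v%07d", minWriteId, maxWriteId, visibilityTxnId);
  }

  // Matching delete-side delta written alongside the compacted delta.
  static String compactedDeleteDelta(long minWriteId, long maxWriteId, long visibilityTxnId) {
    return String.format("delete_delta_%07d_%07d_v%07d", minWriteId, maxWriteId, visibilityTxnId);
  }

  public static void main(String[] args) {
    System.out.println(base(24, 26));                     // base_0000024_v0000026
    System.out.println(compactedDelta(21, 24, 26));       // delta_0000021_0000024_v0000026 (assumed form)
    System.out.println(compactedDeleteDelta(21, 27, 28)); // delete_delta_0000021_0000027_v0000028 (assumed form)
  }
}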