Use of org.apache.hadoop.hive.ql.io.AcidInputFormat.DeltaMetaData in project hive by apache.
From the class TestAcidInputFormat, method testDeltaMetaConstructWithState:
@Test
public void testDeltaMetaConstructWithState() throws Exception {
    DeltaMetaData deltaMetaData = new AcidInputFormat.DeltaMetaData(2000L, 2001L, Arrays.asList(97, 98, 99), 0, null);
    assertThat(deltaMetaData.getMinWriteId(), is(2000L));
    assertThat(deltaMetaData.getMaxWriteId(), is(2001L));
    assertThat(deltaMetaData.getStmtIds().size(), is(3));
    assertThat(deltaMetaData.getStmtIds().get(0), is(97));
    assertThat(deltaMetaData.getStmtIds().get(1), is(98));
    assertThat(deltaMetaData.getStmtIds().get(2), is(99));
}
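For orientation, a minimal sketch of the constructor shape used above. The parameter-name comments are illustrative: only the argument values appear in the test itself, and the name of the fourth argument (passed as 0 here, the visibility transaction id in current Hive sources) is an assumption rather than something the snippet shows.

import java.util.Arrays;
import org.apache.hadoop.hive.ql.io.AcidInputFormat;
import org.apache.hadoop.hive.ql.io.AcidInputFormat.DeltaMetaData;

// Sketch only; parameter names are assumptions taken from current Hive sources.
DeltaMetaData meta = new AcidInputFormat.DeltaMetaData(
    2000L,                     // minWriteId
    2001L,                     // maxWriteId
    Arrays.asList(97, 98, 99), // stmtIds
    0,                         // visibilityTxnId (assumed name)
    null);                     // deltaFiles: none for this case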
Use of org.apache.hadoop.hive.ql.io.AcidInputFormat.DeltaMetaData in project hive by apache.
From the class TestAcidInputFormat, method testDeltaMetaWithHdfsFileId:
@Test
public void testDeltaMetaWithHdfsFileId() throws Exception {
    DeltaMetaData deltaMetaData = new AcidInputFormat.DeltaMetaData(2000L, 2001L, new ArrayList<>(), 0,
        Collections.singletonList(new AcidInputFormat.DeltaFileMetaData(100, 200, null, 123L, null, 1)));
    assertEquals(2000L, deltaMetaData.getMinWriteId());
    assertEquals(2001L, deltaMetaData.getMaxWriteId());
    assertEquals(0, deltaMetaData.getStmtIds().size());
    // Round-trip through the Writable interface: write out, then read back into a fresh instance.
    ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
    deltaMetaData.write(new DataOutputStream(byteArrayOutputStream));
    byte[] bytes = byteArrayOutputStream.toByteArray();
    DeltaMetaData copy = new DeltaMetaData();
    copy.readFields(new DataInputStream(new ByteArrayInputStream(bytes)));
    assertEquals(2000L, copy.getMinWriteId());
    assertEquals(2001L, copy.getMaxWriteId());
    assertEquals(0, copy.getStmtIds().size());
    // The explicit HDFS file id (123L) survives serialization.
    AcidInputFormat.DeltaFileMetaData fileMetaData = copy.getDeltaFiles().get(0);
    Object fileId = fileMetaData.getFileId(new Path("deleteDelta"), 1, new HiveConf());
    Assert.assertTrue(fileId instanceof Long);
    long fId = (Long) fileId;
    assertEquals(123L, fId);
    String fileName = fileMetaData.getPath(new Path("deleteDelta"), 1).getName();
    Assert.assertEquals("bucket_00001", fileName);
}
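The write/readFields sequence above is the standard Hadoop Writable round trip, which DeltaMetaData implements. A minimal helper, hypothetical and not part of the Hive test, could factor that pattern out:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.hadoop.io.Writable;

// Hypothetical helper: serialize any Writable and read it back into a fresh instance.
static <T extends Writable> T roundTrip(T source, T target) throws IOException {
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    source.write(new DataOutputStream(buffer));
    target.readFields(new DataInputStream(new ByteArrayInputStream(buffer.toByteArray())));
    return target;
}

With it, the serialization step of the test reduces to DeltaMetaData copy = roundTrip(deltaMetaData, new DeltaMetaData());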
Use of org.apache.hadoop.hive.ql.io.AcidInputFormat.DeltaMetaData in project hive by apache.
From the class TestAcidInputFormat, method testDeltaMetaWithFile:
@Test
public void testDeltaMetaWithFile() throws Exception {
    // FileStatus(length=200, isdir=false, replication=100, blocksize=100, modTime=100, path)
    FileStatus fs = new FileStatus(200, false, 100, 100, 100, new Path("mypath"));
    DeltaMetaData deltaMetaData = new AcidInputFormat.DeltaMetaData(2000L, 2001L, new ArrayList<>(), 0,
        Collections.singletonList(new AcidInputFormat.DeltaFileMetaData(new HdfsFileStatusWithoutId(fs), null, 1)));
    assertEquals(2000L, deltaMetaData.getMinWriteId());
    assertEquals(2001L, deltaMetaData.getMaxWriteId());
    assertEquals(0, deltaMetaData.getStmtIds().size());
    ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
    deltaMetaData.write(new DataOutputStream(byteArrayOutputStream));
    byte[] bytes = byteArrayOutputStream.toByteArray();
    DeltaMetaData copy = new DeltaMetaData();
    copy.readFields(new DataInputStream(new ByteArrayInputStream(bytes)));
    assertEquals(2000L, copy.getMinWriteId());
    assertEquals(2001L, copy.getMaxWriteId());
    assertEquals(0, copy.getStmtIds().size());
    // With no real HDFS file id available, getFileId falls back to a SyntheticFileId.
    AcidInputFormat.DeltaFileMetaData fileMetaData = copy.getDeltaFiles().get(0);
    Object fileId = fileMetaData.getFileId(new Path("deleteDelta"), 1, new HiveConf());
    Assert.assertTrue(fileId instanceof SyntheticFileId);
    assertEquals(100, ((SyntheticFileId) fileId).getModTime());
    assertEquals(200, ((SyntheticFileId) fileId).getLength());
    String fileName = fileMetaData.getPath(new Path("deleteDelta"), 1).getName();
    Assert.assertEquals("bucket_00001", fileName);
}
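The instanceof check captures the design choice being tested: when the delta file is wrapped in HdfsFileStatusWithoutId (no real HDFS file id), the id is synthesized from the file's path, length, and modification time, so it matches the FileStatus built at the top of the test. A minimal sketch of that relationship, assuming SyntheticFileId keeps the (Path, length, modTime) constructor it has in current Hive sources:

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.io.SyntheticFileId;

// Assumption: SyntheticFileId(Path, long len, long modTime) as in current Hive sources.
Path bucketFile = new Path(new Path("deleteDelta"), "bucket_00001");
SyntheticFileId synthetic = new SyntheticFileId(bucketFile, 200L, 100L);
// Length and modTime come straight from the FileStatus built in the test above.
assert synthetic.getLength() == 200L;
assert synthetic.getModTime() == 100L;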