Usage example of org.apache.hadoop.hive.ql.io.AcidInputFormat.DeltaMetaData from the Apache Hive project, taken from the TestAcidInputFormat method testDeltaMetaDataReadFieldsWithStatementIdsResetsState.
@Test
public void testDeltaMetaDataReadFieldsWithStatementIdsResetsState() throws Exception {
  // Stub the serialized stream: write ids 1L/2L, then stmtId count 2,
  // the two statement ids 100/101, and finally 0 delta files.
  when(mockDataInput.readLong()).thenReturn(1L, 2L);
  when(mockDataInput.readInt()).thenReturn(2, 100, 101, 0);

  // Pre-populate the instance so readFields must discard this prior state.
  List<Integer> preExistingStmtIds = new ArrayList<>(Arrays.asList(97, 98, 99));
  DeltaMetaData deltaMetaData =
      new AcidInputFormat.DeltaMetaData(2000L, 2001L, preExistingStmtIds, 0, null);

  deltaMetaData.readFields(mockDataInput);

  // stmtId count + two ids + delta-file count = 4 readInt calls.
  verify(mockDataInput, times(4)).readInt();
  // Everything set at construction time was replaced by the stream contents.
  assertEquals(1L, deltaMetaData.getMinWriteId());
  assertEquals(2L, deltaMetaData.getMaxWriteId());
  assertEquals(2, deltaMetaData.getStmtIds().size());
  assertEquals(Integer.valueOf(100), deltaMetaData.getStmtIds().get(0));
  assertEquals(Integer.valueOf(101), deltaMetaData.getStmtIds().get(1));
}
Usage example of org.apache.hadoop.hive.ql.io.AcidInputFormat.DeltaMetaData from the Apache Hive project, taken from the TestAcidInputFormat method testDeltaMetaWithFileMultiStatement.
@Test
public void testDeltaMetaWithFileMultiStatement() throws Exception {
  // One delta file (stmtId 97, bucket 1) backed by a plain FileStatus:
  // length 200, blockSize 100, modTime 100, no embedded file id.
  FileStatus stat = new FileStatus(200, false, 100, 100, 100, new Path("mypath"));
  AcidInputFormat.DeltaFileMetaData fileMeta =
      new AcidInputFormat.DeltaFileMetaData(new HdfsFileStatusWithoutId(stat), 97, 1);
  DeltaMetaData original = new AcidInputFormat.DeltaMetaData(
      2000L, 2001L, Arrays.asList(97, 98, 99), 0, Collections.singletonList(fileMeta));

  assertEquals(2000L, original.getMinWriteId());
  assertEquals(2001L, original.getMaxWriteId());
  assertEquals(3, original.getStmtIds().size());

  // Round-trip through Writable serialization.
  ByteArrayOutputStream buffer = new ByteArrayOutputStream();
  original.write(new DataOutputStream(buffer));
  DeltaMetaData copy = new DeltaMetaData();
  copy.readFields(new DataInputStream(new ByteArrayInputStream(buffer.toByteArray())));

  assertEquals(2000L, copy.getMinWriteId());
  assertEquals(2001L, copy.getMaxWriteId());
  assertEquals(3, copy.getStmtIds().size());

  // With no real file id available, a SyntheticFileId is derived from the
  // stored length and modification time.
  Object fileId = copy.getDeltaFiles().get(0).getFileId(new Path("deleteDelta"), 1, new HiveConf());
  Assert.assertTrue(fileId instanceof SyntheticFileId);
  assertEquals(100, ((SyntheticFileId) fileId).getModTime());
  assertEquals(200, ((SyntheticFileId) fileId).getLength());

  // The single file belongs to stmtId 97 only.
  assertEquals(1, copy.getDeltaFilesForStmtId(97).size());
  assertEquals(0, copy.getDeltaFilesForStmtId(99).size());
}
Usage example of org.apache.hadoop.hive.ql.io.AcidInputFormat.DeltaMetaData from the Apache Hive project, taken from the TestAcidInputFormat method testDeltaMetaDataReadFieldsNoStatementIds.
@Test
public void testDeltaMetaDataReadFieldsNoStatementIds() throws Exception {
  // Stub the stream: write ids 1L/2L, then a stmtId count of 0 and 0 delta files.
  when(mockDataInput.readLong()).thenReturn(1L, 2L);
  when(mockDataInput.readInt()).thenReturn(0, 0);

  DeltaMetaData deltaMetaData = new AcidInputFormat.DeltaMetaData();
  deltaMetaData.readFields(mockDataInput);

  // Only the stmtId count and the delta-file count are read as ints.
  verify(mockDataInput, times(2)).readInt();
  assertEquals(1L, deltaMetaData.getMinWriteId());
  assertEquals(2L, deltaMetaData.getMaxWriteId());
  Assert.assertTrue(deltaMetaData.getStmtIds().isEmpty());
}
Usage example of org.apache.hadoop.hive.ql.io.AcidInputFormat.DeltaMetaData from the Apache Hive project, taken from the TestAcidInputFormat method testDeltaMetaWithAttemptId.
@Test
public void testDeltaMetaWithAttemptId() throws Exception {
  // One delta file described directly by its attributes: modTime 100,
  // length 200, attemptId 123, no fileId, no rawFormat marker, bucket 1.
  AcidInputFormat.DeltaFileMetaData fileMeta =
      new AcidInputFormat.DeltaFileMetaData(100, 200, 123, null, null, 1);
  DeltaMetaData original = new AcidInputFormat.DeltaMetaData(
      2000L, 2001L, new ArrayList<>(), 0, Collections.singletonList(fileMeta));

  assertEquals(2000L, original.getMinWriteId());
  assertEquals(2001L, original.getMaxWriteId());
  assertEquals(0, original.getStmtIds().size());

  // Round-trip through Writable serialization.
  ByteArrayOutputStream buffer = new ByteArrayOutputStream();
  original.write(new DataOutputStream(buffer));
  DeltaMetaData copy = new DeltaMetaData();
  copy.readFields(new DataInputStream(new ByteArrayInputStream(buffer.toByteArray())));

  assertEquals(2000L, copy.getMinWriteId());
  assertEquals(2001L, copy.getMaxWriteId());
  assertEquals(0, copy.getStmtIds().size());

  // No explicit fileId was supplied, so a SyntheticFileId is derived
  // from the stored modTime and length.
  AcidInputFormat.DeltaFileMetaData restoredFile = copy.getDeltaFiles().get(0);
  Object fileId = restoredFile.getFileId(new Path("deleteDelta"), 1, new HiveConf());
  Assert.assertTrue(fileId instanceof SyntheticFileId);
  assertEquals(100, ((SyntheticFileId) fileId).getModTime());
  assertEquals(200, ((SyntheticFileId) fileId).getLength());

  // The attemptId is appended to the bucket file name.
  String fileName = restoredFile.getPath(new Path("deleteDelta"), 1).getName();
  Assert.assertEquals("bucket_00001_123", fileName);
}
Usage example of org.apache.hadoop.hive.ql.io.AcidInputFormat.DeltaMetaData from the Apache Hive project, taken from the TestAcidInputFormat method testDeltaMetaDataReadFieldsWithStatementIds.
@Test
public void testDeltaMetaDataReadFieldsWithStatementIds() throws Exception {
  // Stub the serialized stream: write ids 1L/2L, then stmtId count 2,
  // the two statement ids 100/101, and finally 0 delta files.
  when(mockDataInput.readLong()).thenReturn(1L, 2L);
  when(mockDataInput.readInt()).thenReturn(2, 100, 101, 0);

  DeltaMetaData deltaMetaData = new AcidInputFormat.DeltaMetaData();
  deltaMetaData.readFields(mockDataInput);

  // stmtId count + two ids + delta-file count = 4 readInt calls.
  verify(mockDataInput, times(4)).readInt();
  assertEquals(1L, deltaMetaData.getMinWriteId());
  assertEquals(2L, deltaMetaData.getMaxWriteId());
  assertEquals(2, deltaMetaData.getStmtIds().size());
  assertEquals(Integer.valueOf(100), deltaMetaData.getStmtIds().get(0));
  assertEquals(Integer.valueOf(101), deltaMetaData.getStmtIds().get(1));
}
Aggregations