Usage example of org.apache.iceberg.util.Pair from the apache/hive project:
class DeleteReadTests, method testPositionDeletes.
@Test
public void testPositionDeletes() throws IOException {
  // Delete the rows at file positions 0, 3 and 6 of the data file;
  // those positions hold the rows with ids 29, 89 and 122.
  List<Pair<CharSequence, Long>> positionDeletes = Lists.newArrayList(
      Pair.of(dataFile.path(), 0L),  // id = 29
      Pair.of(dataFile.path(), 3L),  // id = 89
      Pair.of(dataFile.path(), 6L)); // id = 122

  // Write the position-delete file and collect the referenced data file paths.
  Pair<DeleteFile, Set<CharSequence>> positionDeleteResult =
      FileHelpers.writeDeleteFile(table, Files.localOutput(temp.newFile()), Row.of(0), positionDeletes);

  // Commit the delete file, validating that the referenced data files still exist.
  table.newRowDelta()
      .addDeletes(positionDeleteResult.first())
      .validateDataFilesExist(positionDeleteResult.second())
      .commit();

  // The table should now contain every row except the three deleted ids.
  StructLikeSet expected = rowSetWithoutIds(29, 89, 122);
  StructLikeSet actual = rowSet(tableName, table, "*");
  Assert.assertEquals("Table should contain expected rows", expected, actual);
}
Usage example of org.apache.iceberg.util.Pair from the apache/hive project:
class DeleteReadTests, method testMixedPositionAndEqualityDeletes.
@Test
public void testMixedPositionAndEqualityDeletes() throws IOException {
  // Equality deletes match on the "data" column only.
  Schema deleteRowSchema = table.schema().select("data");
  Record deleteTemplate = GenericRecord.create(deleteRowSchema);
  List<Record> equalityDeletes = Lists.newArrayList(
      deleteTemplate.copy("data", "a"),  // id = 29
      deleteTemplate.copy("data", "d"),  // id = 89
      deleteTemplate.copy("data", "g")); // id = 122
  DeleteFile equalityDeleteFile = FileHelpers.writeDeleteFile(
      table, Files.localOutput(temp.newFile()), Row.of(0), equalityDeletes, deleteRowSchema);

  // Position deletes remove file positions 3 and 5; position 3 (id 89)
  // overlaps with the equality delete on "d".
  List<Pair<CharSequence, Long>> positionDeletes = Lists.newArrayList(
      Pair.of(dataFile.path(), 3L),  // id = 89
      Pair.of(dataFile.path(), 5L)); // id = 121
  Pair<DeleteFile, Set<CharSequence>> positionDeleteResult =
      FileHelpers.writeDeleteFile(table, Files.localOutput(temp.newFile()), Row.of(0), positionDeletes);

  // Commit both delete files in a single row delta.
  table.newRowDelta()
      .addDeletes(equalityDeleteFile)
      .addDeletes(positionDeleteResult.first())
      .validateDataFilesExist(positionDeleteResult.second())
      .commit();

  // Rows 29, 89 and 122 are removed by equality, 121 by position; 89 by both.
  StructLikeSet expected = rowSetWithoutIds(29, 89, 121, 122);
  StructLikeSet actual = rowSet(tableName, table, "*");
  Assert.assertEquals("Table should contain expected rows", expected, actual);
}
Related aggregations.