Example usage of org.apache.iceberg.data.Record in the Apache Hive project — class TestHiveIcebergStorageHandlerLocalScan, method runCreateAndReadTest.
/**
 * Creates a table via the supplied DDL, verifies its schema and partition spec, appends the
 * given records partition by partition, and checks that a full-table SELECT returns all of them.
 *
 * @param identifier table identifier used to load the table after creation
 * @param createSQL DDL statement that creates the table
 * @param expectedSchema schema the created table must expose
 * @param expectedSpec partition spec the created table must expose
 * @param data records to append, keyed by the partition they belong to
 * @throws IOException if writing the data files fails
 */
private void runCreateAndReadTest(TableIdentifier identifier, String createSQL, Schema expectedSchema, PartitionSpec expectedSpec, Map<StructLike, List<Record>> data) throws IOException {
  shell.executeStatement(createSQL);

  // The created table must exist with exactly the expected schema and partitioning.
  org.apache.iceberg.Table icebergTable = testTables.loadTable(identifier);
  Assert.assertEquals(expectedSchema.asStruct(), icebergTable.schema().asStruct());
  Assert.assertEquals(expectedSpec, icebergTable.spec());

  List<Record> expected = Lists.newArrayList();
  // Iterate entries directly instead of keySet() + get() to avoid two redundant map lookups
  // per partition.
  for (Map.Entry<StructLike, List<Record>> partitionData : data.entrySet()) {
    testTables.appendIcebergTable(shell.getHiveConf(), icebergTable, fileFormat, partitionData.getKey(), partitionData.getValue());
    expected.addAll(partitionData.getValue());
  }

  // Read everything back through Hive and compare with what was written.
  List<Object[]> descRows = shell.executeStatement("SELECT * FROM " + identifier.toString());
  List<Record> records = HiveIcebergTestUtils.valueForRow(icebergTable.schema(), descRows);
  // Third argument presumably selects the column used to align rows before comparison —
  // TODO(review): confirm against HiveIcebergTestUtils.validateData.
  HiveIcebergTestUtils.validateData(expected, records, 0);
}
Example usage of org.apache.iceberg.data.Record in the Apache Hive project — class TestHiveIcebergStorageHandlerLocalScan, method testArrayOfStructsInTable.
@Test
public void testArrayOfStructsInTable() throws IOException {
  // Schema: one required column holding a list of (double, long, string) structs.
  Schema schema = new Schema(
      required(1, "arrayofstructs", Types.ListType.ofRequired(2, Types.StructType.of(
          required(3, "something", Types.DoubleType.get()),
          required(4, "someone", Types.LongType.get()),
          required(5, "somewhere", Types.StringType.get())))));
  List<Record> records = testTables.createTableWithGeneratedRecords(shell, "arraytable", schema, fileFormat, 1);

  // For every generated row, project each struct element's fields through Hive and compare
  // against the corresponding generated record.
  for (int rowIdx = 0; rowIdx < records.size(); rowIdx++) {
    List<?> structs = (List<?>) records.get(rowIdx).getField("arrayofstructs");
    for (int elemIdx = 0; elemIdx < structs.size(); elemIdx++) {
      String query = String.format("SELECT arrayofstructs[%d].something, " +
          "arrayofstructs[%d].someone, arrayofstructs[%d].somewhere FROM default.arraytable LIMIT 1 " +
          "OFFSET %d", elemIdx, elemIdx, elemIdx, rowIdx);
      List<Object[]> queryResult = shell.executeStatement(query);
      GenericRecord expectedStruct = (GenericRecord) structs.get(elemIdx);
      Assert.assertEquals(expectedStruct.getField("something"), queryResult.get(0)[0]);
      Assert.assertEquals(expectedStruct.getField("someone"), queryResult.get(0)[1]);
      Assert.assertEquals(expectedStruct.getField("somewhere"), queryResult.get(0)[2]);
    }
  }
}
Example usage of org.apache.iceberg.data.Record in the Apache Hive project — class TestHiveIcebergStorageHandlerLocalScan, method testMapOfMapsInTable.
@Test
public void testMapOfMapsInTable() throws IOException {
  // Schema: one required column holding a string -> (string -> string) map.
  Schema schema = new Schema(
      required(1, "mapofmaps", Types.MapType.ofRequired(2, 3, Types.StringType.get(),
          Types.MapType.ofRequired(4, 5, Types.StringType.get(), Types.StringType.get()))));
  List<Record> records = testTables.createTableWithGeneratedRecords(shell, "maptable", schema, fileFormat, 1);

  // For every generated row, look up each nested key pair through Hive and compare the value
  // with the one in the generated record.
  for (int rowIdx = 0; rowIdx < records.size(); rowIdx++) {
    Map<?, ?> outerMap = (Map<?, ?>) records.get(rowIdx).getField("mapofmaps");
    for (Map.Entry<?, ?> outerEntry : outerMap.entrySet()) {
      Map<?, ?> innerMap = (Map<?, ?>) outerEntry.getValue();
      for (Map.Entry<?, ?> innerEntry : innerMap.entrySet()) {
        String query = String.format("SELECT mapofmaps[\"%s\"]" +
            "[\"%s\"] FROM maptable LIMIT 1 OFFSET %d", outerEntry.getKey(), innerEntry.getKey(), rowIdx);
        List<Object[]> queryResult = shell.executeStatement(query);
        Assert.assertEquals(innerEntry.getValue(), queryResult.get(0)[0]);
      }
    }
  }
}
Example usage of org.apache.iceberg.data.Record in the Apache Hive project — class TestHiveIcebergStorageHandlerLocalScan, method testArrayOfPrimitivesInTable.
@Test
public void testArrayOfPrimitivesInTable() throws IOException {
  // Schema: one required column holding a list of ints.
  Schema schema = new Schema(
      required(1, "arrayofprimitives", Types.ListType.ofRequired(2, Types.IntegerType.get())));
  List<Record> records = testTables.createTableWithGeneratedRecords(shell, "arraytable", schema, fileFormat, 1);

  // For every generated row, select each array element individually through Hive and compare
  // it with the generated value.
  for (int rowIdx = 0; rowIdx < records.size(); rowIdx++) {
    List<?> generatedValues = (List<?>) records.get(rowIdx).getField("arrayofprimitives");
    for (int elemIdx = 0; elemIdx < generatedValues.size(); elemIdx++) {
      String query = String.format("SELECT arrayofprimitives[%d] FROM default.arraytable " +
          "LIMIT 1 OFFSET %d", elemIdx, rowIdx);
      List<Object[]> queryResult = shell.executeStatement(query);
      Assert.assertEquals(generatedValues.get(elemIdx), queryResult.get(0)[0]);
    }
  }
}
Example usage of org.apache.iceberg.data.Record in the Apache Hive project — class TestHiveIcebergStorageHandlerTimezone, method testDateQuery.
/**
 * Verifies filtering on a DATE column through Hive: equality, IN list, greater-than, and a
 * non-matching equality that must return no rows. The table contains exactly two dates,
 * 2020-01-21 and 2020-01-24.
 */
@Test
public void testDateQuery() throws IOException {
  Schema dateSchema = new Schema(optional(1, "d_date", Types.DateType.get()));
  List<Record> records = TestHelper.RecordsBuilder.newInstance(dateSchema)
      .add(LocalDate.of(2020, 1, 21))
      .add(LocalDate.of(2020, 1, 24))
      .build();
  testTables.createTable(shell, "date_test", dateSchema, FileFormat.PARQUET, records);

  // The three matching predicates each select exactly one of the two rows.
  assertSingleDateRow("SELECT * from date_test WHERE d_date='2020-01-21'", "2020-01-21");
  assertSingleDateRow("SELECT * from date_test WHERE d_date in ('2020-01-21', '2020-01-22')", "2020-01-21");
  assertSingleDateRow("SELECT * from date_test WHERE d_date > '2020-01-21'", "2020-01-24");

  // A date not present in the table must match nothing.
  List<Object[]> result = shell.executeStatement("SELECT * from date_test WHERE d_date='2020-01-20'");
  Assert.assertEquals(0, result.size());
}

/**
 * Runs the query and asserts it returns exactly one row whose first column equals the given
 * date string.
 *
 * @param query SELECT statement to execute
 * @param expectedDate expected value of the first column of the single result row
 */
private void assertSingleDateRow(String query, String expectedDate) {
  List<Object[]> result = shell.executeStatement(query);
  Assert.assertEquals(1, result.size());
  Assert.assertEquals(expectedDate, result.get(0)[0]);
}
Aggregations