Use of com.facebook.presto.testing.MaterializedRow in project presto by prestodb.
In class AbstractTestQueries, the method testNonDeterministicProjection:
@Test
public void testNonDeterministicProjection() {
    MaterializedResult materializedResult = computeActual("SELECT r, r + 1 FROM (SELECT rand(100) r FROM orders) LIMIT 10");
    assertEquals(materializedResult.getRowCount(), 10);
    for (MaterializedRow materializedRow : materializedResult) {
        assertEquals(materializedRow.getFieldCount(), 2);
        assertEquals(((Number) materializedRow.getField(0)).intValue() + 1, materializedRow.getField(1));
    }
}
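The test above walks a MaterializedResult row by row. As a minimal, self-contained sketch (not taken from the Presto sources) of the same API, the fragment below builds an expected result with MaterializedResult.resultBuilder and compares it against directly constructed MaterializedRow instances. SESSION, the type constants, and the assumption that the builder creates its rows at DEFAULT_PRECISION are illustrative assumptions, not guarantees from this page.

// Hypothetical illustration only; SESSION (a ConnectorSession), BIGINT, VARCHAR,
// DEFAULT_PRECISION, and TestNG's assertEquals are assumed to be available via the
// same static imports used by the tests on this page.
@Test
public void testMaterializedRowComparisonSketch() {
    MaterializedResult expected = MaterializedResult.resultBuilder(SESSION, BIGINT, VARCHAR)
            .row(1L, "one")
            .row(2L, "two")
            .build();
    assertEquals(expected.getRowCount(), 2);
    // Assumption: rows created by the builder use DEFAULT_PRECISION, so they compare
    // equal to rows constructed directly with the same precision and field values.
    assertEquals(expected.getMaterializedRows().get(0), new MaterializedRow(DEFAULT_PRECISION, 1L, "one"));
    for (MaterializedRow row : expected) {
        assertEquals(row.getFieldCount(), 2);
    }
}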
Use of com.facebook.presto.testing.MaterializedRow in project presto by prestodb.
In class TestCassandraIntegrationSmokeTest, the method testInsert:
@Test
public void testInsert() {
    String sql = "SELECT key, typeuuid, typeinteger, typelong, typebytes, typetimestamp, typeansi, typeboolean, typedecimal, "
            + "typedouble, typefloat, typeinet, typevarchar, typevarint, typetimeuuid, typelist, typemap, typeset"
            + " FROM " + TABLE_ALL_TYPES_INSERT;
    assertEquals(execute(sql).getRowCount(), 0);
    // TODO The following types are not supported yet; replace null with a real value once they are:
    // blob, frozen<set<type>>, inet, list<type>, map<type,type>, set<type>, timeuuid, decimal, uuid, varint
    // timestamp can be inserted, but the expected and actual values are not the same
    execute("INSERT INTO " + TABLE_ALL_TYPES_INSERT + " ("
            + "key,typeuuid,typeinteger,typelong,typebytes,typetimestamp,typeansi,typeboolean,typedecimal,"
            + "typedouble,typefloat,typeinet,typevarchar,typevarint,typetimeuuid,typelist,typemap,typeset"
            + ") VALUES ("
            + "'key1', null, 1, 1000, null, timestamp '1970-01-01 08:34:05.0', 'ansi1', true, null, "
            + "0.3, cast('0.4' as real), null, 'varchar1', null, null, null, null, null )");
    MaterializedResult result = execute(sql);
    int rowCount = result.getRowCount();
    assertEquals(rowCount, 1);
    assertEquals(result.getMaterializedRows().get(0), new MaterializedRow(DEFAULT_PRECISION, "key1", null, 1, 1000L, null, LocalDateTime.of(1970, 1, 1, 8, 34, 5), "ansi1", true, null, 0.3, (float) 0.4, null, "varchar1", null, null, null, null, null));
    // insert null for all datatypes
    execute("INSERT INTO " + TABLE_ALL_TYPES_INSERT + " ("
            + "key, typeuuid, typeinteger, typelong, typebytes, typetimestamp, typeansi, typeboolean, typedecimal,"
            + "typedouble, typefloat, typeinet, typevarchar, typevarint, typetimeuuid, typelist, typemap, typeset"
            + ") VALUES ("
            + "'key2', null, null, null, null, null, null, null, null,"
            + "null, null, null, null, null, null, null, null, null)");
    sql = "SELECT key, typeuuid, typeinteger, typelong, typebytes, typetimestamp, typeansi, typeboolean, typedecimal, "
            + "typedouble, typefloat, typeinet, typevarchar, typevarint, typetimeuuid, typelist, typemap, typeset"
            + " FROM " + TABLE_ALL_TYPES_INSERT + " WHERE key = 'key2'";
    result = execute(sql);
    rowCount = result.getRowCount();
    assertEquals(rowCount, 1);
    assertEquals(result.getMaterializedRows().get(0), new MaterializedRow(DEFAULT_PRECISION, "key2", null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null));
    // insert into only a subset of columns
    execute("INSERT INTO " + TABLE_ALL_TYPES_INSERT + " ("
            + "key, typeinteger, typeansi, typeboolean) VALUES ("
            + "'key3', 999, 'ansi', false)");
    sql = "SELECT key, typeuuid, typeinteger, typelong, typebytes, typetimestamp, typeansi, typeboolean, typedecimal, "
            + "typedouble, typefloat, typeinet, typevarchar, typevarint, typetimeuuid, typelist, typemap, typeset"
            + " FROM " + TABLE_ALL_TYPES_INSERT + " WHERE key = 'key3'";
    result = execute(sql);
    rowCount = result.getRowCount();
    assertEquals(rowCount, 1);
    assertEquals(result.getMaterializedRows().get(0), new MaterializedRow(DEFAULT_PRECISION, "key3", null, 999, null, null, null, "ansi", false, null, null, null, null, null, null, null, null, null, null));
}
Use of com.facebook.presto.testing.MaterializedRow in project presto by prestodb.
In class TestCassandraIntegrationSmokeTest, the method assertSelect:
private void assertSelect(String tableName, boolean createdByPresto) {
    Type uuidType = createdByPresto ? createUnboundedVarcharType() : createVarcharType(36);
    Type inetType = createdByPresto ? createUnboundedVarcharType() : createVarcharType(45);
    String sql = "SELECT " + " key, " + " typeuuid, " + " typeinteger, " + " typelong, " + " typebytes, " + " typetimestamp, " + " typeansi, " + " typeboolean, " + " typedecimal, "
            + " typedouble, " + " typefloat, " + " typeinet, " + " typevarchar, " + " typevarint, " + " typetimeuuid, " + " typelist, " + " typemap, " + " typeset "
            + " FROM " + tableName;
    MaterializedResult result = execute(sql);
    int rowCount = result.getRowCount();
    assertEquals(rowCount, 9);
    assertEquals(result.getTypes(), ImmutableList.of(
            createUnboundedVarcharType(), uuidType, INTEGER, BIGINT, VARBINARY, TIMESTAMP, createUnboundedVarcharType(), BOOLEAN, DOUBLE, DOUBLE, REAL, inetType,
            createUnboundedVarcharType(), createUnboundedVarcharType(), uuidType, createUnboundedVarcharType(), createUnboundedVarcharType(), createUnboundedVarcharType()));
    List<MaterializedRow> sortedRows = result.getMaterializedRows().stream()
            .sorted((o1, o2) -> o1.getField(1).toString().compareTo(o2.getField(1).toString()))
            .collect(toList());
    for (int rowNumber = 1; rowNumber <= rowCount; rowNumber++) {
        assertEquals(sortedRows.get(rowNumber - 1), new MaterializedRow(DEFAULT_PRECISION,
                "key " + rowNumber, String.format("00000000-0000-0000-0000-%012d", rowNumber), rowNumber, rowNumber + 1000L,
                ByteBuffer.wrap(toByteArray(rowNumber)), TIMESTAMP_LOCAL, "ansi " + rowNumber, rowNumber % 2 == 0,
                Math.pow(2, rowNumber), Math.pow(4, rowNumber), (float) Math.pow(8, rowNumber), "127.0.0.1",
                "varchar " + rowNumber, BigInteger.TEN.pow(rowNumber).toString(),
                String.format("d2177dd0-eaa2-11de-a572-001b779c76e%d", rowNumber),
                String.format("[\"list-value-1%1$d\",\"list-value-2%1$d\"]", rowNumber),
                String.format("{%d:%d,%d:%d}", rowNumber, rowNumber + 1L, rowNumber + 2, rowNumber + 3L),
                "[false,true]"));
    }
}
Use of com.facebook.presto.testing.MaterializedRow in project presto by prestodb.
In class AbstractTestHiveClient, the method doTestBucketedTableEvolutionWithDifferentReadCount:
private void doTestBucketedTableEvolutionWithDifferentReadCount(HiveStorageFormat storageFormat, SchemaTableName tableName) throws Exception {
    int rowCount = 100;
    int bucketCount = 16;
    // Produce a table with a partition whose bucket count differs from, but is compatible with, the table bucket count
    createEmptyTable(
            tableName,
            storageFormat,
            ImmutableList.of(new Column("id", HIVE_LONG, Optional.empty(), Optional.empty()), new Column("name", HIVE_STRING, Optional.empty(), Optional.empty())),
            ImmutableList.of(new Column("pk", HIVE_STRING, Optional.empty(), Optional.empty())),
            Optional.of(new HiveBucketProperty(ImmutableList.of("id"), 4, ImmutableList.of(), HIVE_COMPATIBLE, Optional.empty())));
    // write a 4-bucket partition
    MaterializedResult.Builder bucket8Builder = MaterializedResult.resultBuilder(SESSION, BIGINT, VARCHAR, VARCHAR);
    IntStream.range(0, rowCount).forEach(i -> bucket8Builder.row((long) i, String.valueOf(i), "four"));
    insertData(tableName, bucket8Builder.build());
    // Alter the bucket count to 16
    alterBucketProperty(tableName, Optional.of(new HiveBucketProperty(ImmutableList.of("id"), bucketCount, ImmutableList.of(), HIVE_COMPATIBLE, Optional.empty())));
    MaterializedResult result;
    try (Transaction transaction = newTransaction()) {
        ConnectorMetadata metadata = transaction.getMetadata();
        ConnectorSession session = newSession();
        ConnectorTableHandle hiveTableHandle = getTableHandle(metadata, tableName);
        // read entire table
        List<ColumnHandle> columnHandles = ImmutableList.<ColumnHandle>builder()
                .addAll(metadata.getColumnHandles(session, hiveTableHandle).values())
                .build();
        HiveTableLayoutHandle layoutHandle = (HiveTableLayoutHandle) getTableLayout(session, transaction.getMetadata(), hiveTableHandle, Constraint.alwaysTrue(), transaction).getHandle();
        HiveBucketHandle bucketHandle = layoutHandle.getBucketHandle().get();
        HiveTableLayoutHandle modifiedReadBucketCountLayoutHandle = new HiveTableLayoutHandle(
                layoutHandle.getSchemaTableName(),
                layoutHandle.getTablePath(),
                layoutHandle.getPartitionColumns(),
                layoutHandle.getDataColumns(),
                layoutHandle.getTableParameters(),
                layoutHandle.getPartitions().get(),
                layoutHandle.getDomainPredicate(),
                layoutHandle.getRemainingPredicate(),
                layoutHandle.getPredicateColumns(),
                layoutHandle.getPartitionColumnPredicate(),
                Optional.of(new HiveBucketHandle(bucketHandle.getColumns(), bucketHandle.getTableBucketCount(), 2)),
                layoutHandle.getBucketFilter(),
                false,
                "layout",
                Optional.empty(),
                false);
        List<ConnectorSplit> splits = getAllSplits(session, transaction, modifiedReadBucketCountLayoutHandle);
        assertEquals(splits.size(), 16);
        TableHandle tableHandle = toTableHandle(transaction, hiveTableHandle, modifiedReadBucketCountLayoutHandle);
        ImmutableList.Builder<MaterializedRow> allRows = ImmutableList.builder();
        for (ConnectorSplit split : splits) {
            try (ConnectorPageSource pageSource = pageSourceProvider.createPageSource(transaction.getTransactionHandle(), session, split, tableHandle.getLayout().get(), columnHandles, NON_CACHEABLE)) {
                MaterializedResult intermediateResult = materializeSourceDataStream(session, pageSource, getTypes(columnHandles));
                allRows.addAll(intermediateResult.getMaterializedRows());
            }
        }
        result = new MaterializedResult(allRows.build(), getTypes(columnHandles));
        assertEquals(result.getRowCount(), rowCount);
        Map<String, Integer> columnIndex = indexColumns(columnHandles);
        int nameColumnIndex = columnIndex.get("name");
        int bucketColumnIndex = columnIndex.get(BUCKET_COLUMN_NAME);
        for (MaterializedRow row : result.getMaterializedRows()) {
            String name = (String) row.getField(nameColumnIndex);
            int bucket = (int) row.getField(bucketColumnIndex);
            assertEquals(bucket, Integer.parseInt(name) % bucketCount);
        }
    }
}
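The loop above materializes each split separately and then stitches the per-split rows back into a single MaterializedResult through the (List<MaterializedRow>, List<Type>) constructor. A hedged helper sketch along these lines (hypothetical, not part of AbstractTestHiveClient) makes that merge step explicit, assuming all partial results share the same column types:

// Hypothetical helper: concatenate the rows of several per-split results into one
// MaterializedResult so that row-count and per-row assertions can run over the whole table.
// Callers must pass the column types shared by every partial result.
private static MaterializedResult mergeSplitResults(List<Type> types, List<MaterializedResult> partialResults) {
    ImmutableList.Builder<MaterializedRow> rows = ImmutableList.builder();
    for (MaterializedResult partial : partialResults) {
        rows.addAll(partial.getMaterializedRows());
    }
    return new MaterializedResult(rows.build(), types);
}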
Use of com.facebook.presto.testing.MaterializedRow in project presto by prestodb.
In class AbstractTestHiveClient, the method testGetPartialRecords:
@Test
public void testGetPartialRecords() throws Exception {
    try (Transaction transaction = newTransaction()) {
        ConnectorMetadata metadata = transaction.getMetadata();
        ConnectorSession session = newSession();
        ConnectorTableHandle hiveTableHandle = getTableHandle(metadata, tablePartitionFormat);
        List<ColumnHandle> columnHandles = ImmutableList.copyOf(metadata.getColumnHandles(session, hiveTableHandle).values());
        Map<String, Integer> columnIndex = indexColumns(columnHandles);
        ConnectorTableLayoutHandle layoutHandle = getLayout(session, transaction, hiveTableHandle, TupleDomain.all());
        List<ConnectorSplit> splits = getAllSplits(session, transaction, layoutHandle);
        assertEquals(splits.size(), partitionCount);
        for (ConnectorSplit split : splits) {
            HiveSplit hiveSplit = (HiveSplit) split;
            List<HivePartitionKey> partitionKeys = hiveSplit.getPartitionKeys();
            String ds = partitionKeys.get(0).getValue().orElse(null);
            String fileFormat = partitionKeys.get(1).getValue().orElse(null);
            HiveStorageFormat fileType = HiveStorageFormat.valueOf(fileFormat.toUpperCase());
            int dummyPartition = Integer.parseInt(partitionKeys.get(2).getValue().orElse(null));
            long rowNumber = 0;
            try (ConnectorPageSource pageSource = pageSourceProvider.createPageSource(transaction.getTransactionHandle(), session, hiveSplit, layoutHandle, columnHandles, NON_CACHEABLE)) {
                assertPageSourceType(pageSource, fileType);
                MaterializedResult result = materializeSourceDataStream(session, pageSource, getTypes(columnHandles));
                for (MaterializedRow row : result) {
                    rowNumber++;
                    assertEquals(row.getField(columnIndex.get("t_double")), 6.2 + rowNumber);
                    assertEquals(row.getField(columnIndex.get("ds")), ds);
                    assertEquals(row.getField(columnIndex.get("file_format")), fileFormat);
                    assertEquals(row.getField(columnIndex.get("dummy")), dummyPartition);
                }
            }
            assertEquals(rowNumber, 100);
        }
    }
}
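Both Hive snippets resolve field positions by name through indexColumns(columnHandles), whose body is not shown on this page. The sketch below is a hedged reconstruction of what such a helper could look like, assuming every handle is a HiveColumnHandle exposing getName(); the real implementation in AbstractTestHiveClient may differ.

// Hedged reconstruction, not the actual AbstractTestHiveClient code: map each column
// name to its position so tests can call row.getField(columnIndex.get("name"))
// instead of hard-coding field offsets.
private static Map<String, Integer> indexColumns(List<ColumnHandle> columnHandles) {
    ImmutableMap.Builder<String, Integer> index = ImmutableMap.builder();
    int position = 0;
    for (ColumnHandle columnHandle : columnHandles) {
        index.put(((HiveColumnHandle) columnHandle).getName(), position);
        position++;
    }
    return index.build();
}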