
Example 21 with ValueSet

use of com.amazonaws.athena.connector.lambda.domain.predicate.ValueSet in project aws-athena-query-federation by awslabs.

the class TimestreamRecordHandlerTest method readRecordsTimeSeriesView.

@Test
public void readRecordsTimeSeriesView() throws Exception {
    logger.info("readRecordsTimeSeriesView - enter");
    Schema schemaForReadView = SchemaBuilder.newBuilder()
            .addField("region", Types.MinorType.VARCHAR.getType())
            .addField("az", Types.MinorType.VARCHAR.getType())
            .addField("hostname", Types.MinorType.VARCHAR.getType())
            .addField(FieldBuilder.newBuilder("cpu_utilization", Types.MinorType.LIST.getType())
                    .addField(FieldBuilder.newBuilder("cpu_utilization", Types.MinorType.STRUCT.getType())
                            .addDateMilliField("time")
                            .addFloat8Field("measure_value::double")
                            .build())
                    .build())
            .addMetadata(VIEW_METADATA_FIELD, "select az, hostname, region,  CREATE_TIME_SERIES(time, measure_value::double) as cpu_utilization from \"" + DEFAULT_SCHEMA + "\".\"" + TEST_TABLE + "\" WHERE measure_name = 'cpu_utilization' GROUP BY measure_name, az, hostname, region")
            .build();
    String expectedQuery = "WITH t1 AS ( select az, hostname, region,  CREATE_TIME_SERIES(time, measure_value::double) as cpu_utilization from \"my_schema\".\"my_table\" WHERE measure_name = 'cpu_utilization' GROUP BY measure_name, az, hostname, region )  SELECT region, az, hostname, cpu_utilization FROM t1 WHERE (\"az\" IN ('us-east-1a','us-east-1b'))";
    QueryResult mockResult = makeMockQueryResult(schemaForReadView, 1_000);
    when(mockClient.query(any(QueryRequest.class))).thenAnswer((Answer<QueryResult>) invocationOnMock -> {
        QueryRequest request = (QueryRequest) invocationOnMock.getArguments()[0];
        assertEquals("actual: " + request.getQueryString(), expectedQuery, request.getQueryString().replace("\n", ""));
        return mockResult;
    });
    S3SpillLocation splitLoc = S3SpillLocation.newBuilder().withBucket(UUID.randomUUID().toString()).withSplitId(UUID.randomUUID().toString()).withQueryId(UUID.randomUUID().toString()).withIsDirectory(true).build();
    Split split = Split.newBuilder(splitLoc, null).build();
    Map<String, ValueSet> constraintsMap = new HashMap<>();
    constraintsMap.put("az", EquatableValueSet.newBuilder(allocator, Types.MinorType.VARCHAR.getType(), true, true).add("us-east-1a").add("us-east-1b").build());
    ReadRecordsRequest request = new ReadRecordsRequest(IDENTITY, "default", "queryId-" + System.currentTimeMillis(),
            new TableName(DEFAULT_SCHEMA, TEST_TABLE), schemaForReadView, split, new Constraints(constraintsMap),
            // 100GB don't expect this to spill
            100_000_000_000L, 100_000_000_000L);
    RecordResponse rawResponse = handler.doReadRecords(allocator, request);
    ReadRecordsResponse response = (ReadRecordsResponse) rawResponse;
    logger.info("readRecordsTimeSeriesView: rows[{}]", response.getRecordCount());
    for (int i = 0; i < response.getRecordCount() && i < 10; i++) {
        logger.info("readRecordsTimeSeriesView: {}", BlockUtils.rowToString(response.getRecords(), i));
    }
    logger.info("readRecordsTimeSeriesView - exit");
}
Also used : QueryResult(com.amazonaws.services.timestreamquery.model.QueryResult) Schema(org.apache.arrow.vector.types.pojo.Schema) Types(org.apache.arrow.vector.types.Types) LoggerFactory(org.slf4j.LoggerFactory) BlockAllocator(com.amazonaws.athena.connector.lambda.data.BlockAllocator) SpillLocation(com.amazonaws.athena.connector.lambda.domain.spill.SpillLocation) Block(com.amazonaws.athena.connector.lambda.data.Block) ByteArrayInputStream(java.io.ByteArrayInputStream) InputStream(java.io.InputStream) After(org.junit.After) Map(java.util.Map) ValueSet(com.amazonaws.athena.connector.lambda.domain.predicate.ValueSet) AmazonTimestreamQuery(com.amazonaws.services.timestreamquery.AmazonTimestreamQuery) BlockAllocatorImpl(com.amazonaws.athena.connector.lambda.data.BlockAllocatorImpl) Split(com.amazonaws.athena.connector.lambda.domain.Split) ReadRecordsResponse(com.amazonaws.athena.connector.lambda.records.ReadRecordsResponse) UUID(java.util.UUID) TableName(com.amazonaws.athena.connector.lambda.domain.TableName) RecordResponse(com.amazonaws.athena.connector.lambda.records.RecordResponse) Matchers.any(org.mockito.Matchers.any) List(java.util.List) ByteStreams(com.google.common.io.ByteStreams) BlockUtils(com.amazonaws.athena.connector.lambda.data.BlockUtils) S3ObjectInputStream(com.amazonaws.services.s3.model.S3ObjectInputStream) EncryptionKeyFactory(com.amazonaws.athena.connector.lambda.security.EncryptionKeyFactory) Mockito.mock(org.mockito.Mockito.mock) Mock(org.mockito.Mock) EquatableValueSet(com.amazonaws.athena.connector.lambda.domain.predicate.EquatableValueSet) RunWith(org.junit.runner.RunWith) HashMap(java.util.HashMap) Matchers.anyString(org.mockito.Matchers.anyString) ArrayList(java.util.ArrayList) RemoteReadRecordsResponse(com.amazonaws.athena.connector.lambda.records.RemoteReadRecordsResponse) Answer(org.mockito.stubbing.Answer) InvocationOnMock(org.mockito.invocation.InvocationOnMock) S3Object(com.amazonaws.services.s3.model.S3Object) SchemaBuilder(com.amazonaws.athena.connector.lambda.data.SchemaBuilder) TestName(org.junit.rules.TestName) LocalKeyFactory(com.amazonaws.athena.connector.lambda.security.LocalKeyFactory) Matchers.anyObject(org.mockito.Matchers.anyObject) AmazonS3(com.amazonaws.services.s3.AmazonS3) FederatedIdentity(com.amazonaws.athena.connector.lambda.security.FederatedIdentity) PutObjectResult(com.amazonaws.services.s3.model.PutObjectResult) S3BlockSpillReader(com.amazonaws.athena.connector.lambda.data.S3BlockSpillReader) Before(org.junit.Before) Logger(org.slf4j.Logger) AmazonAthena(com.amazonaws.services.athena.AmazonAthena) Assert.assertEquals(org.junit.Assert.assertEquals) Assert.assertNotNull(org.junit.Assert.assertNotNull) Assert.assertTrue(org.junit.Assert.assertTrue) ReadRecordsRequest(com.amazonaws.athena.connector.lambda.records.ReadRecordsRequest) AWSSecretsManager(com.amazonaws.services.secretsmanager.AWSSecretsManager) Test(org.junit.Test) IOException(java.io.IOException) Mockito.when(org.mockito.Mockito.when) FieldBuilder(com.amazonaws.athena.connector.lambda.data.FieldBuilder) Constraints(com.amazonaws.athena.connector.lambda.domain.predicate.Constraints) S3SpillLocation(com.amazonaws.athena.connector.lambda.domain.spill.S3SpillLocation) Rule(org.junit.Rule) MockitoJUnitRunner(org.mockito.runners.MockitoJUnitRunner) QueryRequest(com.amazonaws.services.timestreamquery.model.QueryRequest) TestUtils.makeMockQueryResult(com.amazonaws.athena.connectors.timestream.TestUtils.makeMockQueryResult) VIEW_METADATA_FIELD(com.amazonaws.athena.connector.lambda.handlers.GlueMetadataHandler.VIEW_METADATA_FIELD) Collections(java.util.Collections)
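
The interesting part of this test is the assertion baked into the mocked Timestream client: the view SQL stored under VIEW_METADATA_FIELD is expected to come back wrapped in a WITH clause, with the "az" ValueSet rendered as the IN predicate seen in expectedQuery. The capture-and-assert pattern can be reduced to the following self-contained sketch; the class name, canned result, and stand-in query string are illustrative only and not part of the project.

import static org.junit.Assert.assertEquals;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import com.amazonaws.services.timestreamquery.AmazonTimestreamQuery;
import com.amazonaws.services.timestreamquery.model.QueryRequest;
import com.amazonaws.services.timestreamquery.model.QueryResult;
import org.mockito.stubbing.Answer;

public class QueryCaptureSketch
{
    public static void main(String[] args)
    {
        // Stand-in for the SQL the handler under test is expected to generate.
        String expectedQuery = "SELECT 1";
        QueryResult cannedResult = new QueryResult();

        AmazonTimestreamQuery mockClient = mock(AmazonTimestreamQuery.class);
        when(mockClient.query(any(QueryRequest.class))).thenAnswer((Answer<QueryResult>) invocation -> {
            QueryRequest request = (QueryRequest) invocation.getArguments()[0];
            // Strip newlines before comparing, since the handler may pretty-print its SQL.
            assertEquals(expectedQuery, request.getQueryString().replace("\n", ""));
            return cannedResult;
        });

        // Anything that now calls mockClient.query(...) gets its SQL checked and the canned rows back.
        QueryResult result = mockClient.query(new QueryRequest().withQueryString("SELECT 1"));
        assertEquals(cannedResult, result);
    }
}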

Example 22 with ValueSet

use of com.amazonaws.athena.connector.lambda.domain.predicate.ValueSet in project aws-athena-query-federation by awslabs.

the class SelectQueryBuilderTest method build.

@Test
public void build() {
    logger.info("build: enter");
    String expected = "SELECT col1, col2, col3, col4 FROM \"myDatabase\".\"myTable\" WHERE (\"col4\" IN ('val1','val2')) AND ((\"col2\" < 1)) AND (\"col3\" IN (20000,10000)) AND ((\"col1\" > 1))";
    Map<String, ValueSet> constraintsMap = new HashMap<>();
    constraintsMap.put("col1", SortedRangeSet.copyOf(Types.MinorType.INT.getType(), ImmutableList.of(Range.greaterThan(allocator, Types.MinorType.INT.getType(), 1)), false));
    constraintsMap.put("col2", SortedRangeSet.copyOf(Types.MinorType.INT.getType(), ImmutableList.of(Range.lessThan(allocator, Types.MinorType.INT.getType(), 1)), false));
    constraintsMap.put("col3", EquatableValueSet.newBuilder(allocator, Types.MinorType.INT.getType(), true, true).add(20000L).add(10000L).build());
    constraintsMap.put("col4", EquatableValueSet.newBuilder(allocator, Types.MinorType.VARCHAR.getType(), true, true).add("val1").add("val2").build());
    Schema schema = SchemaBuilder.newBuilder().addStringField("col1").addIntField("col2").addBigIntField("col3").addStringField("col4").build();
    String actual = queryFactory.createSelectQueryBuilder(VIEW_METADATA_FIELD)
            .withDatabaseName("myDatabase")
            .withTableName("myTable")
            .withProjection(schema)
            .withConjucts(new Constraints(constraintsMap))
            .build()
            .replace("\n", "");
    logger.info("build: actual[{}]", actual);
    assertEquals(expected, actual);
    logger.info("build: exit");
}
Also used : Constraints(com.amazonaws.athena.connector.lambda.domain.predicate.Constraints) HashMap(java.util.HashMap) Schema(org.apache.arrow.vector.types.pojo.Schema) EquatableValueSet(com.amazonaws.athena.connector.lambda.domain.predicate.EquatableValueSet) ValueSet(com.amazonaws.athena.connector.lambda.domain.predicate.ValueSet) Test(org.junit.Test)
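
Examples 21 through 23 all assemble their predicates the same way: range predicates become a SortedRangeSet of Range objects and IN-style predicates become an EquatableValueSet, keyed by column name and wrapped in a Constraints object whose getSummary() the handlers and query builders later read. The following is a minimal, self-contained sketch of that producer side; the class name, column names, and values are illustrative, and it assumes a BlockAllocatorImpl can be opened and closed with try-with-resources as in the connector's own tests.

import com.amazonaws.athena.connector.lambda.data.BlockAllocatorImpl;
import com.amazonaws.athena.connector.lambda.domain.predicate.Constraints;
import com.amazonaws.athena.connector.lambda.domain.predicate.EquatableValueSet;
import com.amazonaws.athena.connector.lambda.domain.predicate.Range;
import com.amazonaws.athena.connector.lambda.domain.predicate.SortedRangeSet;
import com.amazonaws.athena.connector.lambda.domain.predicate.ValueSet;
import com.google.common.collect.ImmutableList;
import org.apache.arrow.vector.types.Types;

import java.util.HashMap;
import java.util.Map;

public class ConstraintsSketch
{
    public static void main(String[] args) throws Exception
    {
        try (BlockAllocatorImpl allocator = new BlockAllocatorImpl()) {
            Map<String, ValueSet> constraintsMap = new HashMap<>();

            // Range predicate ("year" > 2000, nulls excluded), as in Example 23.
            constraintsMap.put("year", SortedRangeSet.copyOf(
                    Types.MinorType.INT.getType(),
                    ImmutableList.of(Range.greaterThan(allocator, Types.MinorType.INT.getType(), 2000)),
                    false));

            // IN-list predicate ("az" IN ('us-east-1a', 'us-east-1b')), as in Example 21.
            constraintsMap.put("az", EquatableValueSet.newBuilder(
                    allocator, Types.MinorType.VARCHAR.getType(), true, true)
                    .add("us-east-1a")
                    .add("us-east-1b")
                    .build());

            // Handlers and query builders read the same map back via getSummary().
            Constraints constraints = new Constraints(constraintsMap);
            System.out.println("columns with predicates: " + constraints.getSummary().keySet());
        }
    }
}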

Example 23 with ValueSet

use of com.amazonaws.athena.connector.lambda.domain.predicate.ValueSet in project aws-athena-query-federation by awslabs.

the class VerticaMetadataHandlerTest method getPartitions.

@Test
public void getPartitions() throws Exception {
    Schema tableSchema = SchemaBuilder.newBuilder().addIntField("day").addIntField("month").addIntField("year").addStringField("preparedStmt").addStringField("queryId").addStringField("awsRegionSql").build();
    Set<String> partitionCols = new HashSet<>();
    partitionCols.add("preparedStmt");
    partitionCols.add("queryId");
    partitionCols.add("awsRegionSql");
    Map<String, ValueSet> constraintsMap = new HashMap<>();
    constraintsMap.put("day", SortedRangeSet.copyOf(org.apache.arrow.vector.types.Types.MinorType.INT.getType(), ImmutableList.of(Range.greaterThan(allocator, org.apache.arrow.vector.types.Types.MinorType.INT.getType(), 0)), false));
    constraintsMap.put("month", SortedRangeSet.copyOf(org.apache.arrow.vector.types.Types.MinorType.INT.getType(), ImmutableList.of(Range.greaterThan(allocator, org.apache.arrow.vector.types.Types.MinorType.INT.getType(), 0)), false));
    constraintsMap.put("year", SortedRangeSet.copyOf(org.apache.arrow.vector.types.Types.MinorType.INT.getType(), ImmutableList.of(Range.greaterThan(allocator, org.apache.arrow.vector.types.Types.MinorType.INT.getType(), 2000)), false));
    GetTableLayoutRequest req = null;
    GetTableLayoutResponse res = null;
    String testSql = "Select * from schema1.table1";
    String[] test = new String[] { "Select * from schema1.table1", "Select * from schema1.table1" };
    String[] schema = { "TABLE_SCHEM", "TABLE_NAME", "COLUMN_NAME", "TYPE_NAME" };
    Object[][] values = { { "testSchema", "testTable1", "day", "int" }, { "testSchema", "testTable1", "month", "int" }, { "testSchema", "testTable1", "year", "int" }, { "testSchema", "testTable1", "preparedStmt", "varchar" }, { "testSchema", "testTable1", "queryId", "varchar" }, { "testSchema", "testTable1", "awsRegionSql", "varchar" } };
    int[] types = { Types.INTEGER, Types.INTEGER, Types.INTEGER, Types.VARCHAR, Types.VARCHAR, Types.VARCHAR };
    List<TableName> expectedTables = new ArrayList<>();
    expectedTables.add(new TableName("testSchema", "testTable1"));
    AtomicInteger rowNumber = new AtomicInteger(-1);
    ResultSet resultSet = mockResultSet(schema, types, values, rowNumber);
    Mockito.when(connection.getMetaData().getColumns(null, "schema1", "table1", null)).thenReturn(resultSet);
    Mockito.when(queryFactory.createVerticaExportQueryBuilder()).thenReturn(new VerticaExportQueryBuilder(new ST("templateVerticaExportQuery")));
    Mockito.when(verticaMetadataHandlerMocked.getS3ExportBucket()).thenReturn("testS3Bucket");
    try {
        req = new GetTableLayoutRequest(this.federatedIdentity, "queryId", "default", new TableName("schema1", "table1"), new Constraints(constraintsMap), tableSchema, partitionCols);
        res = verticaMetadataHandlerMocked.doGetTableLayout(allocator, req);
        Block partitions = res.getPartitions();
        String actualQueryID = partitions.getFieldReader("queryId").readText().toString();
        String expectedExportSql = "EXPORT TO PARQUET(directory = 's3://testS3Bucket/" + actualQueryID + "', Compression='snappy', fileSizeMB=16, rowGroupSizeMB=16) " + "AS SELECT day,month,year,preparedStmt,queryId,awsRegionSql " + "FROM \"schema1\".\"table1\" " + "WHERE ((\"day\" > 0 )) AND ((\"month\" > 0 )) AND ((\"year\" > 2000 ))";
        Assert.assertEquals(expectedExportSql, partitions.getFieldReader("preparedStmt").readText().toString());
        for (int row = 0; row < partitions.getRowCount() && row < 1; row++) {
            logger.info("doGetTableLayout:{} {}", row, BlockUtils.rowToString(partitions, row));
        }
        assertTrue(partitions.getRowCount() > 0);
        logger.info("doGetTableLayout: partitions[{}]", partitions.getRowCount());
    } finally {
        try {
            req.close();
            res.close();
        } catch (Exception ex) {
            logger.error("doGetTableLayout: ", ex);
        }
    }
    logger.info("doGetTableLayout - exit");
}
Also used : ST(org.stringtemplate.v4.ST) Schema(org.apache.arrow.vector.types.pojo.Schema) Matchers.anyString(org.mockito.Matchers.anyString) TableName(com.amazonaws.athena.connector.lambda.domain.TableName) Constraints(com.amazonaws.athena.connector.lambda.domain.predicate.Constraints) VerticaExportQueryBuilder(com.amazonaws.athena.connectors.vertica.query.VerticaExportQueryBuilder) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) ValueSet(com.amazonaws.athena.connector.lambda.domain.predicate.ValueSet) Test(org.junit.Test)

Example 24 with ValueSet

use of com.amazonaws.athena.connector.lambda.domain.predicate.ValueSet in project aws-athena-query-federation by awslabs.

the class SecurityGroupsTableProvider method readWithConstraint.

/**
 * Calls DescribeSecurityGroups on the AWS EC2 Client returning all SecurityGroup rules that match the supplied
 * predicate and attempting to push down certain predicates (namely queries for specific SecurityGroups) to EC2.
 *
 * @See TableProvider
 */
@Override
public void readWithConstraint(BlockSpiller spiller, ReadRecordsRequest recordsRequest, QueryStatusChecker queryStatusChecker) {
    boolean done = false;
    DescribeSecurityGroupsRequest request = new DescribeSecurityGroupsRequest();
    ValueSet idConstraint = recordsRequest.getConstraints().getSummary().get("id");
    if (idConstraint != null && idConstraint.isSingleValue()) {
        request.setGroupIds(Collections.singletonList(idConstraint.getSingleValue().toString()));
    }
    ValueSet nameConstraint = recordsRequest.getConstraints().getSummary().get("name");
    if (nameConstraint != null && nameConstraint.isSingleValue()) {
        request.setGroupNames(Collections.singletonList(nameConstraint.getSingleValue().toString()));
    }
    while (!done) {
        DescribeSecurityGroupsResult response = ec2.describeSecurityGroups(request);
        // Each rule is mapped to a row in the response. SGs have INGRESS and EGRESS rules.
        for (SecurityGroup next : response.getSecurityGroups()) {
            for (IpPermission nextPerm : next.getIpPermissions()) {
                instanceToRow(next, nextPerm, INGRESS, spiller);
            }
            for (IpPermission nextPerm : next.getIpPermissionsEgress()) {
                instanceToRow(next, nextPerm, EGRESS, spiller);
            }
        }
        request.setNextToken(response.getNextToken());
        if (response.getNextToken() == null || !queryStatusChecker.isQueryRunning()) {
            done = true;
        }
    }
}
Also used : DescribeSecurityGroupsRequest(com.amazonaws.services.ec2.model.DescribeSecurityGroupsRequest) IpPermission(com.amazonaws.services.ec2.model.IpPermission) DescribeSecurityGroupsResult(com.amazonaws.services.ec2.model.DescribeSecurityGroupsResult) SecurityGroup(com.amazonaws.services.ec2.model.SecurityGroup) ValueSet(com.amazonaws.athena.connector.lambda.domain.predicate.ValueSet)
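
The Javadoc above is the whole pushdown contract: only a single-value constraint on "id" or "name" is forwarded to EC2 via setGroupIds/setGroupNames; anything else falls back to the paged, unfiltered DescribeSecurityGroups scan. A short sketch of the caller-side condition that makes the pushdown branch fire follows; the class name and security group id are purely illustrative.

import com.amazonaws.athena.connector.lambda.data.BlockAllocatorImpl;
import com.amazonaws.athena.connector.lambda.domain.predicate.Constraints;
import com.amazonaws.athena.connector.lambda.domain.predicate.EquatableValueSet;
import com.amazonaws.athena.connector.lambda.domain.predicate.ValueSet;
import org.apache.arrow.vector.types.Types;

import java.util.Collections;

public class PushdownCheckSketch
{
    public static void main(String[] args) throws Exception
    {
        try (BlockAllocatorImpl allocator = new BlockAllocatorImpl()) {
            // Exactly one value for "id", so isSingleValue() is true and the provider
            // calls request.setGroupIds(...) instead of scanning every security group.
            ValueSet idConstraint = EquatableValueSet.newBuilder(
                    allocator, Types.MinorType.VARCHAR.getType(), true, false)
                    .add("sg-1234567890abcdef0")
                    .build();
            Constraints constraints = new Constraints(Collections.singletonMap("id", idConstraint));

            ValueSet summary = constraints.getSummary().get("id");
            System.out.println("pushed down? " + (summary != null && summary.isSingleValue()));
        }
    }
}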

Example 25 with ValueSet

use of com.amazonaws.athena.connector.lambda.domain.predicate.ValueSet in project aws-athena-query-federation by awslabs.

the class VpcTableProvider method readWithConstraint.

/**
 * Calls DescribeVPCs on the AWS EC2 Client returning all VPCs that match the supplied predicate and attempting
 * to push down certain predicates (namely queries for specific VPCs) to EC2.
 *
 * @See TableProvider
 */
@Override
public void readWithConstraint(BlockSpiller spiller, ReadRecordsRequest recordsRequest, QueryStatusChecker queryStatusChecker) {
    DescribeVpcsRequest request = new DescribeVpcsRequest();
    ValueSet idConstraint = recordsRequest.getConstraints().getSummary().get("id");
    if (idConstraint != null && idConstraint.isSingleValue()) {
        request.setVpcIds(Collections.singletonList(idConstraint.getSingleValue().toString()));
    }
    DescribeVpcsResult response = ec2.describeVpcs(request);
    for (Vpc vpc : response.getVpcs()) {
        instanceToRow(vpc, spiller);
    }
}
Also used : DescribeVpcsResult(com.amazonaws.services.ec2.model.DescribeVpcsResult) DescribeVpcsRequest(com.amazonaws.services.ec2.model.DescribeVpcsRequest) Vpc(com.amazonaws.services.ec2.model.Vpc) ValueSet(com.amazonaws.athena.connector.lambda.domain.predicate.ValueSet)
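
The VPC provider applies the same rule, minus the pagination: a single-value "id" constraint becomes setVpcIds(...), while a multi-value one is ignored and every VPC is returned, leaving the filtering to the engine. A complementary sketch of that non-pushdown case follows; the class name and VPC ids are illustrative.

import com.amazonaws.athena.connector.lambda.data.BlockAllocatorImpl;
import com.amazonaws.athena.connector.lambda.domain.predicate.EquatableValueSet;
import com.amazonaws.athena.connector.lambda.domain.predicate.ValueSet;
import org.apache.arrow.vector.types.Types;

public class NoPushdownSketch
{
    public static void main(String[] args) throws Exception
    {
        try (BlockAllocatorImpl allocator = new BlockAllocatorImpl()) {
            // Two candidate ids, so isSingleValue() is false; readWithConstraint then
            // issues an unfiltered DescribeVpcs call and relies on the engine to filter rows.
            ValueSet idConstraint = EquatableValueSet.newBuilder(
                    allocator, Types.MinorType.VARCHAR.getType(), true, false)
                    .add("vpc-11111111")
                    .add("vpc-22222222")
                    .build();
            System.out.println("pushed down? " + idConstraint.isSingleValue());
        }
    }
}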

Aggregations

ValueSet (com.amazonaws.athena.connector.lambda.domain.predicate.ValueSet)104
Test (org.junit.Test)66
Constraints (com.amazonaws.athena.connector.lambda.domain.predicate.Constraints)63
HashMap (java.util.HashMap)48
TableName (com.amazonaws.athena.connector.lambda.domain.TableName)47
Schema (org.apache.arrow.vector.types.pojo.Schema)37
Split (com.amazonaws.athena.connector.lambda.domain.Split)31
Range (com.amazonaws.athena.connector.lambda.domain.predicate.Range)27
ReadRecordsRequest (com.amazonaws.athena.connector.lambda.records.ReadRecordsRequest)27
EquatableValueSet (com.amazonaws.athena.connector.lambda.domain.predicate.EquatableValueSet)26
ArrayList (java.util.ArrayList)25
Matchers.anyString (org.mockito.Matchers.anyString)25
RecordResponse (com.amazonaws.athena.connector.lambda.records.RecordResponse)24
Block (com.amazonaws.athena.connector.lambda.data.Block)23
S3SpillLocation (com.amazonaws.athena.connector.lambda.domain.spill.S3SpillLocation)21
RemoteReadRecordsResponse (com.amazonaws.athena.connector.lambda.records.RemoteReadRecordsResponse)18
SchemaBuilder (com.amazonaws.athena.connector.lambda.data.SchemaBuilder)17
ReadRecordsResponse (com.amazonaws.athena.connector.lambda.records.ReadRecordsResponse)17
InvocationOnMock (org.mockito.invocation.InvocationOnMock)17
BlockAllocatorImpl (com.amazonaws.athena.connector.lambda.data.BlockAllocatorImpl)13