Example 26 with DataContext

Use of org.apache.calcite.DataContext in project storm by apache.

The class TestPlanCompiler, method testDateKeywords:

@Test
public void testDateKeywords() throws Exception {
    int EXPECTED_VALUE_SIZE = 1;
    String sql = "SELECT " + "LOCALTIME, CURRENT_TIME, LOCALTIMESTAMP, CURRENT_TIMESTAMP, CURRENT_DATE " + "FROM FOO " + "WHERE ID > 0 AND ID < 2";
    TestCompilerUtils.CalciteState state = TestCompilerUtils.sqlOverDummyTable(sql);
    final Map<String, ISqlTridentDataSource> data = new HashMap<>();
    data.put("FOO", new TestUtils.MockSqlTridentDataSource());
    QueryPlanner planner = new QueryPlanner(state.schema());
    AbstractTridentProcessor proc = planner.compile(data, sql);
    final DataContext dataContext = proc.getDataContext();
    final TridentTopology topo = proc.build();
    Fields f = proc.outputStream().getOutputFields();
    proc.outputStream().partitionPersist(new TestUtils.MockStateFactory(), f, new TestUtils.MockStateUpdater(), new Fields());
    runTridentTopology(EXPECTED_VALUE_SIZE, proc, topo);
    long utcTimestamp = (long) dataContext.get(DataContext.Variable.UTC_TIMESTAMP.camelName);
    long currentTimestamp = (long) dataContext.get(DataContext.Variable.CURRENT_TIMESTAMP.camelName);
    long localTimestamp = (long) dataContext.get(DataContext.Variable.LOCAL_TIMESTAMP.camelName);
    System.out.println(getCollectedValues());
    java.sql.Timestamp timestamp = new java.sql.Timestamp(utcTimestamp);
    int dateInt = (int) timestamp.toLocalDateTime().atOffset(ZoneOffset.UTC).toLocalDate().toEpochDay();
    int localTimeInt = (int) (localTimestamp % DateTimeUtils.MILLIS_PER_DAY);
    int currentTimeInt = (int) (currentTimestamp % DateTimeUtils.MILLIS_PER_DAY);
    Assert.assertArrayEquals(new Values[] { new Values(localTimeInt, currentTimeInt, localTimestamp, currentTimestamp, dateInt) }, getCollectedValues().toArray());
}
Also used : HashMap(java.util.HashMap) Values(org.apache.storm.tuple.Values) MockState.getCollectedValues(org.apache.storm.sql.TestUtils.MockState.getCollectedValues) ISqlTridentDataSource(org.apache.storm.sql.runtime.ISqlTridentDataSource) QueryPlanner(org.apache.storm.sql.planner.trident.QueryPlanner) TestUtils(org.apache.storm.sql.TestUtils) AbstractTridentProcessor(org.apache.storm.sql.AbstractTridentProcessor) DataContext(org.apache.calcite.DataContext) Fields(org.apache.storm.tuple.Fields) TridentTopology(org.apache.storm.trident.TridentTopology) Test(org.junit.Test)
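
The three values asserted above come straight out of the DataContext attached to the compiled topology: each time variable is looked up by the camelName of a DataContext.Variable constant. As a point of reference, a minimal DataContext sketch that would satisfy those lookups (an illustration of the Calcite interface only, not Storm's actual implementation) could look like this:

import java.util.HashMap;
import java.util.Map;

import org.apache.calcite.DataContext;
import org.apache.calcite.adapter.java.JavaTypeFactory;
import org.apache.calcite.linq4j.QueryProvider;
import org.apache.calcite.schema.SchemaPlus;

/** Minimal sketch of a DataContext serving the time variables read in the test above. */
public class SimpleTimeDataContext implements DataContext {
    private final Map<String, Object> variables = new HashMap<>();

    public SimpleTimeDataContext() {
        long now = System.currentTimeMillis();
        // All three variables are stored as epoch milliseconds here; a real
        // implementation would shift CURRENT_/LOCAL_TIMESTAMP into the session time zone.
        variables.put(DataContext.Variable.UTC_TIMESTAMP.camelName, now);
        variables.put(DataContext.Variable.CURRENT_TIMESTAMP.camelName, now);
        variables.put(DataContext.Variable.LOCAL_TIMESTAMP.camelName, now);
    }

    @Override
    public SchemaPlus getRootSchema() {
        return null; // not needed for plain variable lookups
    }

    @Override
    public JavaTypeFactory getTypeFactory() {
        return null; // not needed for plain variable lookups
    }

    @Override
    public QueryProvider getQueryProvider() {
        return null; // not needed for plain variable lookups
    }

    @Override
    public Object get(String name) {
        return variables.get(name);
    }
}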

Example 27 with DataContext

Use of org.apache.calcite.DataContext in project storm by apache.

The class EvaluationCalc, method execute:

@Override
public Iterable<Values> execute(TridentTuple input) {
    Context calciteContext = new StormContext(dataContext);
    calciteContext.values = input.getValues().toArray();
    if (filterInstance != null) {
        filterInstance.execute(calciteContext, outputValues);
        // filtered out
        if (outputValues[0] == null || !((Boolean) outputValues[0])) {
            return Collections.emptyList();
        }
    }
    if (projectionInstance != null) {
        projectionInstance.execute(calciteContext, outputValues);
        return Collections.singletonList(new Values(outputValues));
    } else {
        return Collections.singletonList(new Values(input.getValues()));
    }
}
Also used : DataContext(org.apache.calcite.DataContext) TridentOperationContext(org.apache.storm.trident.operation.TridentOperationContext) StormContext(org.apache.calcite.interpreter.StormContext) Context(org.apache.calcite.interpreter.Context) Values(org.apache.storm.tuple.Values)
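
StormContext here is a thin adapter: Calcite's interpreter Context carries the root DataContext plus a public values array for the current row, and its constructor is package-private, which is presumably why Storm declares the subclass inside org.apache.calcite.interpreter. A minimal sketch under those assumptions (not a verbatim copy of Storm's class):

// Declared in Calcite's interpreter package so the package-private Context constructor is reachable.
package org.apache.calcite.interpreter;

import org.apache.calcite.DataContext;

/** Sketch: a Context subclass that simply carries the DataContext for compiled expressions. */
public class StormContext extends Context {
    public StormContext(DataContext root) {
        super(root);
    }
}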

Example 28 with DataContext

Use of org.apache.calcite.DataContext in project storm by apache.

The class EvaluationFunction, method apply:

@Override
public Values apply(Values input) {
    Context calciteContext = new StormContext(dataContext);
    calciteContext.values = input.toArray();
    projectionInstance.execute(calciteContext, outputValues);
    return new Values(outputValues);
}
Also used : Context(org.apache.calcite.interpreter.Context) DataContext(org.apache.calcite.DataContext) StormContext(org.apache.calcite.interpreter.StormContext) Values(org.apache.storm.tuple.Values)
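
Both EvaluationFunction and EvaluationCalc hand the Context plus an output array to compiled expression instances; the call shape matches Calcite's interpreter Scalar contract. A hand-written stand-in for such an instance (purely illustrative, whereas the real ones are generated by the planner) might look like:

import org.apache.calcite.interpreter.Context;
import org.apache.calcite.interpreter.Scalar;

/** Hand-written stand-in for a generated projection: emits (col0, col0 + 1). */
public class ExampleProjection implements Scalar {
    @Override
    public void execute(Context context, Object[] results) {
        // context.values holds the current input row, as set by the caller.
        Object col0 = context.values[0];
        results[0] = col0;
        results[1] = ((Number) col0).intValue() + 1;
    }

    @Override
    public Object execute(Context context) {
        Object[] results = new Object[2];
        execute(context, results);
        return results[0];
    }
}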

Example 29 with DataContext

Use of org.apache.calcite.DataContext in project samza by apache.

The class TestProjectTranslator, method testTranslate:

@Test
public void testTranslate() throws IOException, ClassNotFoundException {
    // set up mock values for the constructor of ProjectTranslator
    LogicalProject mockProject = PowerMockito.mock(LogicalProject.class);
    Context mockContext = mock(Context.class);
    ContainerContext mockContainerContext = mock(ContainerContext.class);
    TranslatorContext mockTranslatorContext = mock(TranslatorContext.class);
    TestMetricsRegistryImpl testMetricsRegistryImpl = new TestMetricsRegistryImpl();
    RelNode mockInput = mock(RelNode.class);
    List<RelNode> inputs = new ArrayList<>();
    inputs.add(mockInput);
    when(mockInput.getId()).thenReturn(1);
    when(mockProject.getId()).thenReturn(2);
    when(mockProject.getInputs()).thenReturn(inputs);
    when(mockProject.getInput()).thenReturn(mockInput);
    RelDataType mockRowType = mock(RelDataType.class);
    when(mockRowType.getFieldCount()).thenReturn(1);
    when(mockProject.getRowType()).thenReturn(mockRowType);
    RexNode mockRexField = mock(RexNode.class);
    List<Pair<RexNode, String>> namedProjects = new ArrayList<>();
    namedProjects.add(Pair.of(mockRexField, "test_field"));
    when(mockProject.getNamedProjects()).thenReturn(namedProjects);
    StreamApplicationDescriptorImpl mockAppDesc = mock(StreamApplicationDescriptorImpl.class);
    OperatorSpec<Object, SamzaSqlRelMessage> mockInputOp = mock(OperatorSpec.class);
    MessageStream<SamzaSqlRelMessage> mockStream = new MessageStreamImpl<>(mockAppDesc, mockInputOp);
    when(mockTranslatorContext.getMessageStream(eq(1))).thenReturn(mockStream);
    doAnswer(this.getRegisterMessageStreamAnswer()).when(mockTranslatorContext).registerMessageStream(eq(2), any(MessageStream.class));
    RexToJavaCompiler mockCompiler = mock(RexToJavaCompiler.class);
    when(mockTranslatorContext.getExpressionCompiler()).thenReturn(mockCompiler);
    Expression mockExpr = mock(Expression.class);
    when(mockCompiler.compile(any(), any())).thenReturn(mockExpr);
    when(mockContext.getContainerContext()).thenReturn(mockContainerContext);
    when(mockContainerContext.getContainerMetricsRegistry()).thenReturn(testMetricsRegistryImpl);
    // Apply the translate() method and verify that the correct map operator is constructed
    ProjectTranslator projectTranslator = new ProjectTranslator(1);
    projectTranslator.translate(mockProject, LOGICAL_OP_ID, mockTranslatorContext);
    // make sure that the translator context has registered the LogicalProject node and the output message stream
    verify(mockTranslatorContext, times(1)).registerRelNode(2, mockProject);
    verify(mockTranslatorContext, times(1)).registerMessageStream(2, this.getRegisteredMessageStream(2));
    when(mockTranslatorContext.getRelNode(2)).thenReturn(mockProject);
    when(mockTranslatorContext.getMessageStream(2)).thenReturn(this.getRegisteredMessageStream(2));
    StreamOperatorSpec projectSpec = (StreamOperatorSpec) Whitebox.getInternalState(this.getRegisteredMessageStream(2), "operatorSpec");
    assertNotNull(projectSpec);
    assertEquals(projectSpec.getOpCode(), OperatorSpec.OpCode.MAP);
    // Verify that the init() method establishes the context for the map function
    Map<Integer, TranslatorContext> mockContexts = new HashMap<>();
    mockContexts.put(1, mockTranslatorContext);
    when(mockContext.getApplicationTaskContext()).thenReturn(new SamzaSqlApplicationContext(mockContexts));
    projectSpec.getTransformFn().init(mockContext);
    MapFunction mapFn = (MapFunction) Whitebox.getInternalState(projectSpec, "mapFn");
    assertNotNull(mapFn);
    assertEquals(mockTranslatorContext, Whitebox.getInternalState(mapFn, "translatorContext"));
    assertEquals(mockProject, Whitebox.getInternalState(mapFn, "project"));
    assertEquals(mockExpr, Whitebox.getInternalState(mapFn, "expr"));
    // Verify TestMetricsRegistryImpl works with Project
    assertEquals(1, testMetricsRegistryImpl.getGauges().size());
    assertEquals(2, testMetricsRegistryImpl.getGauges().get(LOGICAL_OP_ID).size());
    assertEquals(1, testMetricsRegistryImpl.getCounters().size());
    assertEquals(2, testMetricsRegistryImpl.getCounters().get(LOGICAL_OP_ID).size());
    assertEquals(0, testMetricsRegistryImpl.getCounters().get(LOGICAL_OP_ID).get(0).getCount());
    assertEquals(0, testMetricsRegistryImpl.getCounters().get(LOGICAL_OP_ID).get(1).getCount());
    // Call mapFn.apply() to verify the projection is correctly applied to the input message
    SamzaSqlRelMessage mockInputMsg = new SamzaSqlRelMessage(new ArrayList<>(), new ArrayList<>(), new SamzaSqlRelMsgMetadata(0L, 0L));
    SamzaSqlExecutionContext executionContext = mock(SamzaSqlExecutionContext.class);
    DataContext dataContext = mock(DataContext.class);
    when(mockTranslatorContext.getExecutionContext()).thenReturn(executionContext);
    when(mockTranslatorContext.getDataContext()).thenReturn(dataContext);
    Object[] result = new Object[1];
    final Object mockFieldObj = new Object();
    doAnswer(invocation -> {
        Object[] retValue = invocation.getArgumentAt(4, Object[].class);
        retValue[0] = mockFieldObj;
        return null;
    }).when(mockExpr).execute(eq(executionContext), eq(mockContext), eq(dataContext), eq(mockInputMsg.getSamzaSqlRelRecord().getFieldValues().toArray()), eq(result));
    SamzaSqlRelMessage retMsg = (SamzaSqlRelMessage) mapFn.apply(mockInputMsg);
    assertEquals(retMsg.getSamzaSqlRelRecord().getFieldNames(), Collections.singletonList("test_field"));
    assertEquals(retMsg.getSamzaSqlRelRecord().getFieldValues(), Collections.singletonList(mockFieldObj));
    // Verify mapFn.apply() updates the TestMetricsRegistryImpl metrics
    assertEquals(1, testMetricsRegistryImpl.getCounters().get(LOGICAL_OP_ID).get(0).getCount());
    assertEquals(1, testMetricsRegistryImpl.getCounters().get(LOGICAL_OP_ID).get(1).getCount());
}
Also used : MessageStreamImpl(org.apache.samza.operators.MessageStreamImpl) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) RelDataType(org.apache.calcite.rel.type.RelDataType) MapFunction(org.apache.samza.operators.functions.MapFunction) ContainerContext(org.apache.samza.context.ContainerContext) StreamOperatorSpec(org.apache.samza.operators.spec.StreamOperatorSpec) DataContext(org.apache.calcite.DataContext) StreamApplicationDescriptorImpl(org.apache.samza.application.descriptors.StreamApplicationDescriptorImpl) SamzaSqlApplicationContext(org.apache.samza.sql.runner.SamzaSqlApplicationContext) MessageStream(org.apache.samza.operators.MessageStream) Pair(org.apache.calcite.util.Pair) SamzaSqlExecutionContext(org.apache.samza.sql.data.SamzaSqlExecutionContext) Context(org.apache.samza.context.Context) SamzaSqlRelMsgMetadata(org.apache.samza.sql.data.SamzaSqlRelMsgMetadata) RexToJavaCompiler(org.apache.samza.sql.data.RexToJavaCompiler) TestMetricsRegistryImpl(org.apache.samza.sql.util.TestMetricsRegistryImpl) RelNode(org.apache.calcite.rel.RelNode) Expression(org.apache.samza.sql.data.Expression) LogicalProject(org.apache.calcite.rel.logical.LogicalProject) SamzaSqlRelMessage(org.apache.samza.sql.data.SamzaSqlRelMessage) RexNode(org.apache.calcite.rex.RexNode) PrepareForTest(org.powermock.core.classloader.annotations.PrepareForTest) Test(org.junit.Test)
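
The doAnswer stub above is the standard Mockito pattern for simulating a collaborator that writes its result into an output array instead of returning it, which is how the compiled expression is invoked alongside the DataContext. The same pattern in isolation, against a hypothetical Evaluator interface (defined here only for illustration), looks like this:

import static org.junit.Assert.assertSame;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;

import org.junit.Test;

public class OutParameterStubbingTest {

    /** Hypothetical stand-in for an interface that fills an output array. */
    interface Evaluator {
        void execute(Object[] input, Object[] results);
    }

    @Test
    public void stubsAnOutputArray() {
        Evaluator evaluator = mock(Evaluator.class);
        final Object produced = new Object();

        // Populate the second argument (the out-array) whenever execute() is called.
        doAnswer(invocation -> {
            Object[] results = (Object[]) invocation.getArguments()[1];
            results[0] = produced;
            return null;
        }).when(evaluator).execute(any(Object[].class), any(Object[].class));

        Object[] out = new Object[1];
        evaluator.execute(new Object[0], out);
        assertSame(produced, out[0]);
    }
}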

Example 30 with DataContext

Use of org.apache.calcite.DataContext in project druid by druid-io.

The class SystemSchemaTest, method testServersTable:

@Test
public void testServersTable() {
    SystemSchema.ServersTable serversTable = EasyMock.createMockBuilder(SystemSchema.ServersTable.class).withConstructor(druidNodeDiscoveryProvider, serverInventoryView, authMapper, overlordClient, coordinatorClient).createMock();
    EasyMock.replay(serversTable);
    final DruidNodeDiscovery coordinatorNodeDiscovery = EasyMock.createMock(DruidNodeDiscovery.class);
    final DruidNodeDiscovery overlordNodeDiscovery = EasyMock.createMock(DruidNodeDiscovery.class);
    final DruidNodeDiscovery brokerNodeDiscovery = EasyMock.createMock(DruidNodeDiscovery.class);
    final DruidNodeDiscovery routerNodeDiscovery = EasyMock.createMock(DruidNodeDiscovery.class);
    final DruidNodeDiscovery historicalNodeDiscovery = EasyMock.createMock(DruidNodeDiscovery.class);
    final DruidNodeDiscovery mmNodeDiscovery = EasyMock.createMock(DruidNodeDiscovery.class);
    final DruidNodeDiscovery peonNodeDiscovery = EasyMock.createMock(DruidNodeDiscovery.class);
    final DruidNodeDiscovery indexerNodeDiscovery = EasyMock.createMock(DruidNodeDiscovery.class);
    EasyMock.expect(druidNodeDiscoveryProvider.getForNodeRole(NodeRole.COORDINATOR)).andReturn(coordinatorNodeDiscovery).once();
    EasyMock.expect(druidNodeDiscoveryProvider.getForNodeRole(NodeRole.OVERLORD)).andReturn(overlordNodeDiscovery).once();
    EasyMock.expect(druidNodeDiscoveryProvider.getForNodeRole(NodeRole.BROKER)).andReturn(brokerNodeDiscovery).once();
    EasyMock.expect(druidNodeDiscoveryProvider.getForNodeRole(NodeRole.ROUTER)).andReturn(routerNodeDiscovery).once();
    EasyMock.expect(druidNodeDiscoveryProvider.getForNodeRole(NodeRole.HISTORICAL)).andReturn(historicalNodeDiscovery).once();
    EasyMock.expect(druidNodeDiscoveryProvider.getForNodeRole(NodeRole.MIDDLE_MANAGER)).andReturn(mmNodeDiscovery).once();
    EasyMock.expect(druidNodeDiscoveryProvider.getForNodeRole(NodeRole.INDEXER)).andReturn(indexerNodeDiscovery).once();
    EasyMock.expect(druidNodeDiscoveryProvider.getForNodeRole(NodeRole.PEON)).andReturn(peonNodeDiscovery).once();
    EasyMock.expect(coordinatorNodeDiscovery.getAllNodes()).andReturn(ImmutableList.of(coordinator, coordinator2)).once();
    EasyMock.expect(overlordNodeDiscovery.getAllNodes()).andReturn(ImmutableList.of(overlord, overlord2)).once();
    EasyMock.expect(brokerNodeDiscovery.getAllNodes()).andReturn(ImmutableList.of(broker1, broker2, brokerWithBroadcastSegments)).once();
    EasyMock.expect(routerNodeDiscovery.getAllNodes()).andReturn(ImmutableList.of(router)).once();
    EasyMock.expect(historicalNodeDiscovery.getAllNodes()).andReturn(ImmutableList.of(historical1, historical2, lameHistorical)).once();
    EasyMock.expect(mmNodeDiscovery.getAllNodes()).andReturn(ImmutableList.of(middleManager)).once();
    EasyMock.expect(peonNodeDiscovery.getAllNodes()).andReturn(ImmutableList.of(peon1, peon2)).once();
    EasyMock.expect(indexerNodeDiscovery.getAllNodes()).andReturn(ImmutableList.of(indexer)).once();
    EasyMock.expect(coordinatorClient.findCurrentLeader()).andReturn(coordinator.getDruidNode().getHostAndPortToUse()).once();
    EasyMock.expect(overlordClient.findCurrentLeader()).andReturn(overlord.getDruidNode().getHostAndPortToUse()).once();
    final List<DruidServer> servers = new ArrayList<>();
    servers.add(mockDataServer(historical1.getDruidNode().getHostAndPortToUse(), 200L, 1000L, "tier"));
    servers.add(mockDataServer(historical2.getDruidNode().getHostAndPortToUse(), 400L, 1000L, "tier"));
    servers.add(mockDataServer(peon1.getDruidNode().getHostAndPortToUse(), 0L, 1000L, "tier"));
    servers.add(mockDataServer(peon2.getDruidNode().getHostAndPortToUse(), 0L, 1000L, "tier"));
    servers.add(mockDataServer(broker1.getDruidNode().getHostAndPortToUse(), 0L, 1000L, "tier"));
    servers.add(mockDataServer(broker2.getDruidNode().getHostAndPortToUse(), 0L, 1000L, "tier"));
    servers.add(mockDataServer(indexer.getDruidNode().getHostAndPortToUse(), 0L, 1000L, "tier"));
    servers.add(mockDataServer(brokerWithBroadcastSegments.getDruidNode().getHostAndPortToUse(), 0L, 1000L, "tier"));
    EasyMock.expect(serverInventoryView.getInventoryValue(lameHistorical.getDruidNode().getHostAndPortToUse())).andReturn(null).once();
    EasyMock.replay(druidNodeDiscoveryProvider, serverInventoryView, coordinatorClient, overlordClient);
    EasyMock.replay(servers.toArray(new Object[0]));
    EasyMock.replay(coordinatorNodeDiscovery, overlordNodeDiscovery, brokerNodeDiscovery, routerNodeDiscovery, historicalNodeDiscovery, mmNodeDiscovery, peonNodeDiscovery, indexerNodeDiscovery);
    DataContext dataContext = createDataContext(Users.SUPER);
    final List<Object[]> rows = serversTable.scan(dataContext).toList();
    rows.sort((Object[] row1, Object[] row2) -> ((Comparable) row1[0]).compareTo(row2[0]));
    final List<Object[]> expectedRows = new ArrayList<>();
    final Long nonLeader = NullHandling.defaultLongValue();
    expectedRows.add(createExpectedRow("brokerHost:8082", "brokerHost", 8082, -1, NodeRole.BROKER, null, 0L, 0L, nonLeader));
    expectedRows.add(createExpectedRow("brokerHostWithBroadcastSegments:8282", "brokerHostWithBroadcastSegments", 8082, 8282, NodeRole.BROKER, "tier", 0L, 1000L, nonLeader));
    expectedRows.add(createExpectedRow("histHost:8083", "histHost", 8083, -1, NodeRole.HISTORICAL, "tier", 400L, 1000L, nonLeader));
    expectedRows.add(createExpectedRow("indexerHost:8091", "indexerHost", 8091, -1, NodeRole.INDEXER, "tier", 0L, 1000L, nonLeader));
    expectedRows.add(createExpectedRow("lameHost:8083", "lameHost", 8083, -1, NodeRole.HISTORICAL, "tier", 0L, 1000L, nonLeader));
    expectedRows.add(createExpectedRow("localhost:8080", "localhost", 8080, -1, NodeRole.PEON, "tier", 0L, 1000L, nonLeader));
    expectedRows.add(createExpectedRow("localhost:8081", "localhost", 8081, -1, NodeRole.COORDINATOR, null, 0L, 0L, 1L));
    expectedRows.add(createExpectedRow("localhost:8082", "localhost", 8082, -1, NodeRole.BROKER, null, 0L, 0L, nonLeader));
    expectedRows.add(createExpectedRow("localhost:8083", "localhost", 8083, -1, NodeRole.HISTORICAL, "tier", 200L, 1000L, nonLeader));
    expectedRows.add(createExpectedRow("localhost:8090", "localhost", 8090, -1, NodeRole.OVERLORD, null, 0L, 0L, 1L));
    expectedRows.add(createExpectedRow("localhost:8181", "localhost", 8181, -1, NodeRole.COORDINATOR, null, 0L, 0L, 0L));
    expectedRows.add(createExpectedRow("localhost:8190", "localhost", 8190, -1, NodeRole.OVERLORD, null, 0L, 0L, 0L));
    expectedRows.add(createExpectedRow("localhost:8888", "localhost", 8888, -1, NodeRole.ROUTER, null, 0L, 0L, nonLeader));
    expectedRows.add(createExpectedRow("mmHost:8091", "mmHost", 8091, -1, NodeRole.MIDDLE_MANAGER, null, 0L, 0L, nonLeader));
    expectedRows.add(createExpectedRow("peonHost:8080", "peonHost", 8080, -1, NodeRole.PEON, "tier", 0L, 1000L, nonLeader));
    Assert.assertEquals(expectedRows.size(), rows.size());
    for (int i = 0; i < rows.size(); i++) {
        Assert.assertArrayEquals(expectedRows.get(i), rows.get(i));
    }
    // Verify value types.
    verifyTypes(rows, SystemSchema.SERVERS_SIGNATURE);
}
Also used : DataContext(org.apache.calcite.DataContext) DruidNodeDiscovery(org.apache.druid.discovery.DruidNodeDiscovery) ArrayList(java.util.ArrayList) ImmutableDruidServer(org.apache.druid.client.ImmutableDruidServer) DruidServer(org.apache.druid.client.DruidServer) Test(org.junit.Test)
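
ServersTable is one of Druid's SQL system tables, and scan(DataContext) is the Calcite ScannableTable entry point that the test drives directly. For context, a minimal self-contained table of that shape (purely illustrative, not Druid's implementation) could be written as:

import java.util.Arrays;
import java.util.List;

import org.apache.calcite.DataContext;
import org.apache.calcite.linq4j.Enumerable;
import org.apache.calcite.linq4j.Linq4j;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.schema.ScannableTable;
import org.apache.calcite.schema.impl.AbstractTable;
import org.apache.calcite.sql.type.SqlTypeName;

/** Tiny two-column table: every row is (server, curr_size). */
public class ExampleServersTable extends AbstractTable implements ScannableTable {
    private final List<Object[]> rows = Arrays.asList(
        new Object[]{"localhost:8083", 200L},
        new Object[]{"histHost:8083", 400L});

    @Override
    public RelDataType getRowType(RelDataTypeFactory typeFactory) {
        return typeFactory.builder()
            .add("server", SqlTypeName.VARCHAR)
            .add("curr_size", SqlTypeName.BIGINT)
            .build();
    }

    @Override
    public Enumerable<Object[]> scan(DataContext root) {
        // A real system table would consult the DataContext (for example, the
        // authenticated user) before deciding which rows to return.
        return Linq4j.asEnumerable(rows);
    }
}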

Aggregations

DataContext (org.apache.calcite.DataContext): 47
Test (org.junit.Test): 13
ArrayList (java.util.ArrayList): 10
RexNode (org.apache.calcite.rex.RexNode): 8
Nullable (org.checkerframework.checker.nullness.qual.Nullable): 8
HashMap (java.util.HashMap): 7
RelDataType (org.apache.calcite.rel.type.RelDataType): 7
RelDataTypeFactory (org.apache.calcite.rel.type.RelDataTypeFactory): 7
Values (org.apache.storm.tuple.Values): 7
Context (org.apache.calcite.interpreter.Context): 6
StormContext (org.apache.calcite.interpreter.StormContext): 6
SQLException (java.sql.SQLException): 5
AvaticaStatement (org.apache.calcite.avatica.AvaticaStatement): 5
TypedValue (org.apache.calcite.avatica.remote.TypedValue): 5
Enumerable (org.apache.calcite.linq4j.Enumerable): 5
ScannableTable (org.apache.calcite.schema.ScannableTable): 5
URL (java.net.URL): 4
JavaTypeFactory (org.apache.calcite.adapter.java.JavaTypeFactory): 4
Enumerator (org.apache.calcite.linq4j.Enumerator): 4
RexBuilder (org.apache.calcite.rex.RexBuilder): 4