
Example 1 with NetworkAddress

Use of org.apache.hyracks.api.comm.NetworkAddress in project asterixdb by apache.

From the class NodeManagerTest, method mockNodeControllerState.

private NodeControllerState mockNodeControllerState(String nodeId, boolean invalidIpAddr) {
    NodeControllerState ncState = mock(NodeControllerState.class);
    String ipAddr = invalidIpAddr ? "255.255.255:255" : "127.0.0.2";
    NetworkAddress dataAddr = new NetworkAddress(ipAddr, 1001);
    NetworkAddress resultAddr = new NetworkAddress(ipAddr, 1002);
    NetworkAddress msgAddr = new NetworkAddress(ipAddr, 1003);
    when(ncState.getCapacity()).thenReturn(new NodeCapacity(NODE_MEMORY_SIZE, NODE_CORES));
    when(ncState.getDataPort()).thenReturn(dataAddr);
    when(ncState.getDatasetPort()).thenReturn(resultAddr);
    when(ncState.getMessagingPort()).thenReturn(msgAddr);
    NCConfig ncConfig = new NCConfig(nodeId);
    ncConfig.setDataPublicAddress(ipAddr);
    when(ncState.getNCConfig()).thenReturn(ncConfig);
    return ncState;
}
Also used: NetworkAddress (org.apache.hyracks.api.comm.NetworkAddress), NodeCapacity (org.apache.hyracks.api.job.resource.NodeCapacity), NCConfig (org.apache.hyracks.control.common.controllers.NCConfig), NodeControllerState (org.apache.hyracks.control.cc.NodeControllerState)
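
The address strings above are plain host/port pairs. As a minimal sketch (a hypothetical helper, not part of NodeManagerTest), assuming only the NetworkAddress constructor and the getAddress()/getPort() accessors that appear elsewhere in these examples:

private static NetworkAddress exampleDataAddress() {
    // Hypothetical helper: builds the same kind of address that
    // mockNodeControllerState wires into the mocked NodeControllerState.
    NetworkAddress dataAddr = new NetworkAddress("127.0.0.2", 1001);
    // getAddress()/getPort() are the accessors used in NetworkManager.start() below.
    assert "127.0.0.2".equals(dataAddr.getAddress()) && dataAddr.getPort() == 1001;
    return dataAddr;
}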

Example 2 with NetworkAddress

Use of org.apache.hyracks.api.comm.NetworkAddress in project asterixdb by apache.

From the class ConnectorApiServletTest, method testGet.

@Test
public void testGet() throws Exception {
    // Starts test asterixdb cluster.
    SqlppExecutionTest.setUp();
    // Configures a test connector api servlet.
    ConnectorApiServlet let = new ConnectorApiServlet(new ConcurrentHashMap<>(), new String[] { "/" }, (ICcApplicationContext) ExecutionTestUtil.integrationUtil.cc.getApplicationContext());
    Map<String, NodeControllerInfo> nodeMap = new HashMap<>();
    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    PrintWriter outputWriter = new PrintWriter(outputStream);
    // Creates mocks.
    IHyracksClientConnection mockHcc = mock(IHyracksClientConnection.class);
    NodeControllerInfo mockInfo1 = mock(NodeControllerInfo.class);
    NodeControllerInfo mockInfo2 = mock(NodeControllerInfo.class);
    IServletRequest mockRequest = mock(IServletRequest.class);
    IServletResponse mockResponse = mock(IServletResponse.class);
    FullHttpRequest mockHttpRequest = mock(FullHttpRequest.class);
    // Put stuff in let map
    let.ctx().put(ServletConstants.HYRACKS_CONNECTION_ATTR, mockHcc);
    // Sets up mock returns.
    when(mockRequest.getHttpRequest()).thenReturn(mockHttpRequest);
    when(mockHttpRequest.method()).thenReturn(HttpMethod.GET);
    when(mockRequest.getParameter("dataverseName")).thenReturn("Metadata");
    when(mockRequest.getParameter("datasetName")).thenReturn("Dataset");
    when(mockResponse.writer()).thenReturn(outputWriter);
    when(mockHcc.getNodeControllerInfos()).thenReturn(nodeMap);
    when(mockInfo1.getNetworkAddress()).thenReturn(new NetworkAddress("127.0.0.1", 3099));
    when(mockInfo2.getNetworkAddress()).thenReturn(new NetworkAddress("127.0.0.2", 3099));
    // Calls ConnectorAPIServlet.formResponseObject.
    nodeMap.put("asterix_nc1", mockInfo1);
    nodeMap.put("asterix_nc2", mockInfo2);
    let.handle(mockRequest, mockResponse);
    // Constructs the actual response.
    ObjectMapper om = new ObjectMapper();
    ObjectNode actualResponse = (ObjectNode) om.readTree(outputStream.toString());
    // Checks the temp-or-not, primary key, data type of the dataset.
    boolean temp = actualResponse.get("temp").asBoolean();
    Assert.assertFalse(temp);
    String primaryKey = actualResponse.get("keys").asText();
    Assert.assertEquals("DataverseName,DatasetName", primaryKey);
    ARecordType recordType = (ARecordType) JSONDeserializerForTypes.convertFromJSON(actualResponse.get("type"));
    Assert.assertEquals(getMetadataRecordType("Metadata", "Dataset"), recordType);
    // Checks the correctness of results.
    ArrayNode splits = (ArrayNode) actualResponse.get("splits");
    String path = (splits.get(0)).get("path").asText();
    Assert.assertTrue(path.endsWith("Metadata/Dataset_idx_Dataset"));
    // Tears down the asterixdb cluster.
    SqlppExecutionTest.tearDown();
}
Also used: IHyracksClientConnection (org.apache.hyracks.api.client.IHyracksClientConnection), FullHttpRequest (io.netty.handler.codec.http.FullHttpRequest), ObjectNode (com.fasterxml.jackson.databind.node.ObjectNode), HashMap (java.util.HashMap), ConcurrentHashMap (java.util.concurrent.ConcurrentHashMap), ByteArrayOutputStream (java.io.ByteArrayOutputStream), IServletRequest (org.apache.hyracks.http.api.IServletRequest), NetworkAddress (org.apache.hyracks.api.comm.NetworkAddress), NodeControllerInfo (org.apache.hyracks.api.client.NodeControllerInfo), ArrayNode (com.fasterxml.jackson.databind.node.ArrayNode), ConnectorApiServlet (org.apache.asterix.api.http.server.ConnectorApiServlet), IServletResponse (org.apache.hyracks.http.api.IServletResponse), ARecordType (org.apache.asterix.om.types.ARecordType), ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper), PrintWriter (java.io.PrintWriter), Test (org.junit.Test), SqlppExecutionTest (org.apache.asterix.test.runtime.SqlppExecutionTest)
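
The part of this test that involves NetworkAddress is the mock wiring for NodeControllerInfo.getNetworkAddress(). A reduced sketch of just that wiring, using the same Mockito calls and values as the test above:

Map<String, NodeControllerInfo> nodeMap = new HashMap<>();
NodeControllerInfo mockInfo = mock(NodeControllerInfo.class);
when(mockInfo.getNetworkAddress()).thenReturn(new NetworkAddress("127.0.0.1", 3099));
nodeMap.put("asterix_nc1", mockInfo);
// The servlet reaches this map through IHyracksClientConnection.getNodeControllerInfos(),
// so "asterix_nc1" now resolves to 127.0.0.1:3099.
NetworkAddress resolved = nodeMap.get("asterix_nc1").getNetworkAddress();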

Example 3 with NetworkAddress

Use of org.apache.hyracks.api.comm.NetworkAddress in project asterixdb by apache.

From the class SchedulerTest, method testSchedulerLargerHDFS.

/**
     * Test the case where the HDFS cluster is larger than the Hyracks cluster
     *
     * @throws Exception
     */
public void testSchedulerLargerHDFS() throws Exception {
    int dataPort = 5099;
    int resultPort = 5098;
    int messagingPort = 5097;
    Map<String, NodeControllerInfo> ncNameToNcInfos = TestUtils.generateNodeControllerInfo(4, "nc", "10.0.0.", dataPort, resultPort, messagingPort);
    ncNameToNcInfos.put("nc7", new NodeControllerInfo("nc7", NodeStatus.ALIVE, new NetworkAddress("10.0.0.7", dataPort), new NetworkAddress("10.0.0.5", resultPort), new NetworkAddress("10.0.0.5", messagingPort), 2));
    ncNameToNcInfos.put("nc12", new NodeControllerInfo("nc12", NodeStatus.ALIVE, new NetworkAddress("10.0.0.12", dataPort), new NetworkAddress("10.0.0.5", resultPort), new NetworkAddress("10.0.0.5", messagingPort), 2));
    InputSplit[] fileSplits = new InputSplit[12];
    fileSplits[0] = new FileSplit(new Path("part-1"), 0, 0, new String[] { "10.0.0.1", "10.0.0.2", "10.0.0.3" });
    fileSplits[1] = new FileSplit(new Path("part-2"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" });
    fileSplits[2] = new FileSplit(new Path("part-3"), 0, 0, new String[] { "10.0.0.4", "10.0.0.5", "10.0.0.6" });
    fileSplits[3] = new FileSplit(new Path("part-4"), 0, 0, new String[] { "10.0.0.2", "10.0.0.1", "10.0.0.6" });
    fileSplits[4] = new FileSplit(new Path("part-5"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" });
    fileSplits[5] = new FileSplit(new Path("part-6"), 0, 0, new String[] { "10.0.0.2", "10.0.0.3", "10.0.0.5" });
    fileSplits[6] = new FileSplit(new Path("part-7"), 0, 0, new String[] { "10.0.0.1", "10.0.0.2", "10.0.0.3" });
    fileSplits[7] = new FileSplit(new Path("part-8"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" });
    fileSplits[8] = new FileSplit(new Path("part-12"), 0, 0, new String[] { "10.0.0.14", "10.0.0.11", "10.0.0.13" });
    fileSplits[9] = new FileSplit(new Path("part-10"), 0, 0, new String[] { "10.0.0.2", "10.0.0.1", "10.0.0.6" });
    fileSplits[10] = new FileSplit(new Path("part-11"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.7" });
    fileSplits[11] = new FileSplit(new Path("part-9"), 0, 0, new String[] { "10.0.0.4", "10.0.0.5", "10.0.0.6" });
    Scheduler scheduler = new Scheduler(ncNameToNcInfos);
    String[] locationConstraints = scheduler.getLocationConstraints(fileSplits);
    String[] expectedResults = new String[] { "nc1", "nc4", "nc4", "nc1", "nc3", "nc2", "nc2", "nc3", "nc12", "nc7", "nc7", "nc12" };
    for (int i = 0; i < locationConstraints.length; i++) {
        Assert.assertEquals(locationConstraints[i], expectedResults[i]);
    }
    expectedResults = new String[] { "nc1", "nc4", "nc4", "nc1", "nc3", "nc2", "nc2", "nc3", "nc7", "nc12", "nc7", "nc12" };
    ClusterTopology topology = parseTopology();
    scheduler = new Scheduler(ncNameToNcInfos, topology);
    locationConstraints = scheduler.getLocationConstraints(fileSplits);
    for (int i = 0; i < locationConstraints.length; i++) {
        Assert.assertEquals(locationConstraints[i], expectedResults[i]);
    }
}
Also used: Path (org.apache.hadoop.fs.Path), NetworkAddress (org.apache.hyracks.api.comm.NetworkAddress), NodeControllerInfo (org.apache.hyracks.api.client.NodeControllerInfo), FileSplit (org.apache.hadoop.mapred.FileSplit), ClusterTopology (org.apache.hyracks.api.topology.ClusterTopology), InputSplit (org.apache.hadoop.mapred.InputSplit)
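
The expected assignments come from matching each split's block hosts against the node controllers' addresses. A hypothetical locality check along those lines is sketched below; it is not the Scheduler's actual algorithm, and it assumes NodeControllerInfo.getNetworkAddress() returns the address the scheduler matches on:

// Hypothetical helper: lists the node controllers whose advertised address
// appears among a split's block hosts (split.getLocations() may throw IOException).
static List<String> localCandidates(InputSplit split, Map<String, NodeControllerInfo> ncNameToNcInfos) throws IOException {
    List<String> candidates = new ArrayList<>();
    for (String host : split.getLocations()) {
        for (Map.Entry<String, NodeControllerInfo> entry : ncNameToNcInfos.entrySet()) {
            if (host.equals(entry.getValue().getNetworkAddress().getAddress())) {
                candidates.add(entry.getKey());
            }
        }
    }
    return candidates;
}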

Example 4 with NetworkAddress

Use of org.apache.hyracks.api.comm.NetworkAddress in project asterixdb by apache.

From the class TestUtils, method generateNodeControllerInfo.

public static Map<String, NodeControllerInfo> generateNodeControllerInfo(int numberOfNodes, String ncNamePrefix, String addressPrefix, int netPort, int dataPort, int messagingPort) {
    Map<String, NodeControllerInfo> ncNameToNcInfos = new HashMap<>();
    for (int i = 1; i <= numberOfNodes; i++) {
        String ncId = ncNamePrefix + i;
        String ncAddress = addressPrefix + i;
        ncNameToNcInfos.put(ncId, new NodeControllerInfo(ncId, NodeStatus.ALIVE, new NetworkAddress(ncAddress, netPort), new NetworkAddress(ncAddress, dataPort), new NetworkAddress(ncAddress, messagingPort), 2));
    }
    return ncNameToNcInfos;
}
Also used: NetworkAddress (org.apache.hyracks.api.comm.NetworkAddress), HashMap (java.util.HashMap), NodeControllerInfo (org.apache.hyracks.api.client.NodeControllerInfo)
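
A short usage sketch of this helper, printing the generated node ids and addresses; it assumes getNetworkAddress() returns the first (netPort) address passed to the NodeControllerInfo constructor:

Map<String, NodeControllerInfo> ncs = TestUtils.generateNodeControllerInfo(4, "nc", "10.0.0.", 5099, 5098, 5097);
for (Map.Entry<String, NodeControllerInfo> entry : ncs.entrySet()) {
    NetworkAddress addr = entry.getValue().getNetworkAddress();
    System.out.println(entry.getKey() + " -> " + addr.getAddress() + ":" + addr.getPort());
}
// Expected to print nc1 -> 10.0.0.1:5099 through nc4 -> 10.0.0.4:5099,
// under the assumption stated above.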

Example 5 with NetworkAddress

Use of org.apache.hyracks.api.comm.NetworkAddress in project asterixdb by apache.

From the class NetworkManager, method start.

public void start() throws IOException {
    md.start();
    InetSocketAddress sockAddr = md.getLocalAddress();
    localNetworkAddress = new NetworkAddress(sockAddr.getHostString(), sockAddr.getPort());
    // make it a copy of localNetworkAddress
    if (publicNetworkAddress.getAddress() == null) {
        publicNetworkAddress = localNetworkAddress;
    } else {
        // Likewise for public port
        if (publicNetworkAddress.getPort() == 0) {
            publicNetworkAddress = new NetworkAddress(publicNetworkAddress.getAddress(), sockAddr.getPort());
        }
    }
}
Also used: NetworkAddress (org.apache.hyracks.api.comm.NetworkAddress), InetSocketAddress (java.net.InetSocketAddress)
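
The method falls back to the locally bound socket address when no public address, or no public port, has been configured. A standalone sketch of the same fallback logic (resolvePublicAddress is a hypothetical name; only the NetworkAddress constructor and accessors shown above are used):

static NetworkAddress resolvePublicAddress(NetworkAddress configured, InetSocketAddress bound) {
    if (configured == null || configured.getAddress() == null) {
        // No public address configured: advertise the locally bound one.
        return new NetworkAddress(bound.getHostString(), bound.getPort());
    }
    if (configured.getPort() == 0) {
        // Public host configured but port left as 0: take the actual bound port.
        return new NetworkAddress(configured.getAddress(), bound.getPort());
    }
    return configured;
}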

Aggregations

NetworkAddress (org.apache.hyracks.api.comm.NetworkAddress): 15
InetSocketAddress (java.net.InetSocketAddress): 6
NodeControllerInfo (org.apache.hyracks.api.client.NodeControllerInfo): 5
HashMap (java.util.HashMap): 4
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper): 2
ArrayNode (com.fasterxml.jackson.databind.node.ArrayNode): 2
ObjectNode (com.fasterxml.jackson.databind.node.ObjectNode): 2
ArrayList (java.util.ArrayList): 2
List (java.util.List): 2
Map (java.util.Map): 2
ConcurrentHashMap (java.util.concurrent.ConcurrentHashMap): 2
ConnectorApiServlet (org.apache.asterix.api.http.server.ConnectorApiServlet): 2
ARecordType (org.apache.asterix.om.types.ARecordType): 2
SqlppExecutionTest (org.apache.asterix.test.runtime.SqlppExecutionTest): 2
NodeControllerState (org.apache.hyracks.control.cc.NodeControllerState): 2
INodeManager (org.apache.hyracks.control.cc.cluster.INodeManager): 2
FullHttpRequest (io.netty.handler.codec.http.FullHttpRequest): 1
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 1
PrintWriter (java.io.PrintWriter): 1
Field (java.lang.reflect.Field): 1