
Example 1 with Endpoint

Use of org.apache.hadoop.registry.client.types.Endpoint in project hadoop by apache.

The class RegistryTestHelper, method validateEntry.

/**
 * General code to validate bits of a component/service entry built with
 * {@link #addSampleEndpoints(ServiceRecord, String)}.
 * @param record instance to check
 */
public static void validateEntry(ServiceRecord record) {
    assertNotNull("null service record", record);
    List<Endpoint> endpoints = record.external;
    // two external endpoints are expected: the web UI and WebHDFS
    assertEquals(2, endpoints.size());
    Endpoint webhdfs = findEndpoint(record, API_WEBHDFS, true, 1, 1);
    assertEquals(API_WEBHDFS, webhdfs.api);
    assertEquals(AddressTypes.ADDRESS_URI, webhdfs.addressType);
    assertEquals(ProtocolTypes.PROTOCOL_REST, webhdfs.protocolType);
    List<Map<String, String>> addressList = webhdfs.addresses;
    Map<String, String> url = addressList.get(0);
    String addr = url.get("uri");
    assertTrue(addr.contains("http"));
    assertTrue(addr.contains(":9820"));
    // internal endpoints: expect one address with two elements (host and port)
    Endpoint nnipc = findEndpoint(record, NNIPC, false, 1, 2);
    assertEquals("wrong protocol in " + nnipc, ProtocolTypes.PROTOCOL_THRIFT, nnipc.protocolType);
    Endpoint ipc2 = findEndpoint(record, IPC2, false, 1, 2);
    assertNotNull(ipc2);
    Endpoint web = findEndpoint(record, HTTP_API, true, 1, 1);
    assertEquals(1, web.addresses.size());
    assertEquals(1, web.addresses.get(0).size());
}
Also used : Endpoint(org.apache.hadoop.registry.client.types.Endpoint), Map(java.util.Map)
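
The findEndpoint helper referenced above belongs to the same test class but is not shown on this page. A minimal sketch of what such a lookup could look like, inferred purely from the call sites above (the parameter names and exact assertions are assumptions, not the Hadoop source):

public static Endpoint findEndpoint(ServiceRecord record, String api,
        boolean external, int addressCount, int elementsPerAddress) {
    List<Endpoint> endpoints = external ? record.external : record.internal;
    for (Endpoint endpoint : endpoints) {
        if (api.equals(endpoint.api)) {
            // sanity-check the shape implied by the two trailing int arguments
            assertEquals("wrong address count in " + endpoint, addressCount, endpoint.addresses.size());
            for (Map<String, String> address : endpoint.addresses) {
                assertEquals("wrong element count in " + endpoint, elementsPerAddress, address.size());
            }
            return endpoint;
        }
    }
    fail("no endpoint with api " + api + " in " + record);
    // never reached; fail() throws
    return null;
}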

Example 2 with Endpoint

Use of org.apache.hadoop.registry.client.types.Endpoint in project hadoop by apache.

The class RegistryTestHelper, method addSampleEndpoints.

/**
 * Add some endpoints to a service record.
 * @param entry record to add the endpoints to
 * @param hostname hostname to use in the endpoint addresses
 * @throws URISyntaxException if an endpoint URI is malformed
 */
public static void addSampleEndpoints(ServiceRecord entry, String hostname) throws URISyntaxException {
    assertNotNull(hostname);
    entry.addExternalEndpoint(webEndpoint(HTTP_API, new URI("http", hostname + ":80", "/")));
    entry.addExternalEndpoint(restEndpoint(API_WEBHDFS, new URI("http", hostname + ":9820", "/")));
    // build the HDFS IPC endpoint by hand: create it without an address, then add one
    Endpoint endpoint = ipcEndpoint(API_HDFS, null);
    endpoint.addresses.add(RegistryTypeUtils.hostnamePortPair(hostname, 8030));
    entry.addInternalEndpoint(endpoint);
    InetSocketAddress localhost = new InetSocketAddress("localhost", 8050);
    entry.addInternalEndpoint(inetAddrEndpoint(NNIPC, ProtocolTypes.PROTOCOL_THRIFT, "localhost", 8050));
    entry.addInternalEndpoint(RegistryTypeUtils.ipcEndpoint(IPC2, localhost));
}
Also used : Endpoint(org.apache.hadoop.registry.client.types.Endpoint), InetSocketAddress(java.net.InetSocketAddress), URI(java.net.URI)
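
Together with Example 1 this forms a round trip: build a record with sample endpoints, then validate its shape. A minimal usage sketch, assuming a JUnit test in the same class (the test name and hostname are illustrative):

@Test
public void testSampleEndpointsValidate() throws Exception {
    ServiceRecord record = new ServiceRecord();
    // adds 2 external endpoints (web UI, WebHDFS) and 3 internal ones
    addSampleEndpoints(record, "namenode.example.org");
    validateEntry(record);
}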

Example 3 with Endpoint

Use of org.apache.hadoop.registry.client.types.Endpoint in project hadoop by apache.

The class RegistryCli, method resolve.

@SuppressWarnings("unchecked")
public int resolve(String[] args) {
    Options resolveOption = new Options();
    CommandLineParser parser = new GnuParser();
    try {
        CommandLine line = parser.parse(resolveOption, args);
        List<String> argsList = line.getArgList();
        // argsList includes the command name itself, so a single path argument gives size 2
        if (argsList.size() != 2) {
            return usageError("resolve requires exactly one path argument", RESOLVE_USAGE);
        }
        if (!validatePath(argsList.get(1))) {
            return -1;
        }
        try {
            ServiceRecord record = registry.resolve(argsList.get(1));
            for (Endpoint endpoint : record.external) {
                sysout.println(" Endpoint(ProtocolType=" + endpoint.protocolType + ", Api=" + endpoint.api + ");" + " Addresses(AddressType=" + endpoint.addressType + ") are: ");
                for (Map<String, String> address : endpoint.addresses) {
                    sysout.println("[ ");
                    for (Map.Entry<String, String> entry : address.entrySet()) {
                        sysout.print("\t" + entry.getKey() + ":" + entry.getValue());
                    }
                    sysout.println("\n]");
                }
                sysout.println();
            }
            return 0;
        } catch (Exception e) {
            syserr.println(analyzeException("resolve", e, argsList));
        }
        return -1;
    } catch (ParseException exp) {
        return usageError("Invalid syntax " + exp, RESOLVE_USAGE);
    }
}
Also used : Options(org.apache.commons.cli.Options), GnuParser(org.apache.commons.cli.GnuParser), URISyntaxException(java.net.URISyntaxException), InvalidRecordException(org.apache.hadoop.registry.client.exceptions.InvalidRecordException), InvalidPathnameException(org.apache.hadoop.registry.client.exceptions.InvalidPathnameException), AuthenticationFailedException(org.apache.hadoop.registry.client.exceptions.AuthenticationFailedException), PathNotFoundException(org.apache.hadoop.fs.PathNotFoundException), NoRecordException(org.apache.hadoop.registry.client.exceptions.NoRecordException), IOException(java.io.IOException), ParseException(org.apache.commons.cli.ParseException), AccessControlException(org.apache.hadoop.security.AccessControlException), NoPathPermissionsException(org.apache.hadoop.registry.client.exceptions.NoPathPermissionsException), ServiceRecord(org.apache.hadoop.registry.client.types.ServiceRecord), CommandLine(org.apache.commons.cli.CommandLine), Endpoint(org.apache.hadoop.registry.client.types.Endpoint), CommandLineParser(org.apache.commons.cli.CommandLineParser), Map(java.util.Map)
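
For a rough sense of how this is driven: the args array handed to resolve includes the command name itself, which is why the method indexes argsList.get(1). A hedged sketch, where the registry path is a hypothetical example and cli stands for an already-constructed RegistryCli instance:

// hypothetical invocation; the path is an example, not a real registry entry
String[] args = {"resolve", "/users/example/services/org-apache-hadoop/hdfs"};
int rc = cli.resolve(args);  // 0 on success, -1 on any resolution failure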

Example 4 with Endpoint

Use of org.apache.hadoop.registry.client.types.Endpoint in project hive by apache.

The class LlapZookeeperRegistryImpl, method register.

@Override
public String register() throws IOException {
    ServiceRecord srv = new ServiceRecord();
    Endpoint rpcEndpoint = getRpcEndpoint();
    srv.addInternalEndpoint(rpcEndpoint);
    srv.addInternalEndpoint(getMngEndpoint());
    srv.addInternalEndpoint(getShuffleEndpoint());
    srv.addExternalEndpoint(getServicesEndpoint());
    srv.addInternalEndpoint(getOutputFormatEndpoint());
    for (Map.Entry<String, String> kv : this.conf) {
        if (kv.getKey().startsWith(HiveConf.PREFIX_LLAP) || kv.getKey().startsWith(HiveConf.PREFIX_HIVE_LLAP)) {
            // TODO: read this somewhere useful, like the task scheduler
            srv.set(kv.getKey(), kv.getValue());
        }
    }
    // restart sensitive instance id
    srv.set(UNIQUE_IDENTIFIER, uniq.toString());
    // Create a znode under the rootNamespace parent for this instance of the server
    try {
        // PersistentEphemeralNode ensures the ephemeral node created on the server stays present
        // even across connection or session interruptions (retries are handled automatically)
        znode = new PersistentEphemeralNode(zooKeeperClient, Mode.EPHEMERAL_SEQUENTIAL, workersPath + "/" + WORKER_PREFIX, encoder.toBytes(srv));
        // start the creation of znodes
        znode.start();
        // We'll wait for 120s for node creation
        long znodeCreationTimeout = 120;
        if (!znode.waitForInitialCreate(znodeCreationTimeout, TimeUnit.SECONDS)) {
            throw new Exception("Max znode creation wait time: " + znodeCreationTimeout + "s exhausted");
        }
        znodePath = znode.getActualPath();
        slotZnode = new SlotZnode(zooKeeperClient, workersPath, SLOT_PREFIX, WORKER_PREFIX, uniq.toString());
        if (!slotZnode.start(znodeCreationTimeout, TimeUnit.SECONDS)) {
            throw new Exception("Max znode creation wait time: " + znodeCreationTimeout + "s exhausted");
        }
        if (HiveConf.getBoolVar(conf, ConfVars.LLAP_VALIDATE_ACLS)) {
            try {
                checkAndSetAcls();
            } catch (Exception ex) {
                throw new IOException("Error validating or setting ACLs. " + DISABLE_MESSAGE, ex);
            }
        }
        if (zooKeeperClient.checkExists().forPath(znodePath) == null) {
            // No node exists, throw exception
            throw new Exception("Unable to create znode for this LLAP instance on ZooKeeper.");
        }
        LOG.info("Registered node. Created a znode on ZooKeeper for LLAP instance: rpc: {}, shuffle: {}," + " webui: {}, mgmt: {}, znodePath: {} ", rpcEndpoint, getShuffleEndpoint(), getServicesEndpoint(), getMngEndpoint(), znodePath);
    } catch (Exception e) {
        LOG.error("Unable to create a znode for this server instance", e);
        CloseableUtils.closeQuietly(znode);
        CloseableUtils.closeQuietly(slotZnode);
        throw (e instanceof IOException) ? (IOException) e : new IOException(e);
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("Created zknode with path: {} service record: {}", znodePath, srv);
    }
    return uniq.toString();
}
Also used : Endpoint(org.apache.hadoop.registry.client.types.Endpoint), PersistentEphemeralNode(org.apache.curator.framework.recipes.nodes.PersistentEphemeralNode), IOException(java.io.IOException), Map(java.util.Map), ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap), HashMap(java.util.HashMap), TreeMap(java.util.TreeMap), URISyntaxException(java.net.URISyntaxException), MalformedURLException(java.net.MalformedURLException), UnknownHostException(java.net.UnknownHostException), InvalidACLException(org.apache.zookeeper.KeeperException.InvalidACLException), ServiceRecord(org.apache.hadoop.registry.client.types.ServiceRecord)
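
On the read side, a peer that discovers this worker's znode would decode the payload back into a ServiceRecord. A minimal sketch, assuming the encoder field above is a RegistryUtils.ServiceRecordMarshal and reusing the Curator client from the method:

// hypothetical read side: decode a znode payload written by register() above
byte[] payload = zooKeeperClient.getData().forPath(znodePath);
ServiceRecord worker = encoder.fromBytes(znodePath, payload);
String workerId = worker.get(UNIQUE_IDENTIFIER);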

Example 5 with Endpoint

Use of org.apache.hadoop.registry.client.types.Endpoint in project jstorm by alibaba.

The class JstormMaster, method setupServiceRecord.

private ServiceRecord setupServiceRecord() {
    ServiceRecord application = new ServiceRecord();
    application.set(YarnRegistryAttributes.YARN_ID, jstormMasterContext.appAttemptID.getApplicationId().toString());
    application.description = JOYConstants.AM;
    application.set(YarnRegistryAttributes.YARN_PERSISTENCE, PersistencePolicies.PERMANENT);
    Map<String, String> addresses = new HashMap<String, String>();
    addresses.put(JOYConstants.HOST, jstormMasterContext.appMasterHostname);
    addresses.put(JOYConstants.PORT, String.valueOf(jstormMasterContext.appMasterThriftPort));
    // Endpoint(api, addressType, protocolType, addresses): a single host/port address map
    Endpoint endpoint = new Endpoint(JOYConstants.HTTP, JOYConstants.HOST_PORT, JOYConstants.RPC, addresses);
    application.addExternalEndpoint(endpoint);
    return application;
}
Also used : Endpoint(org.apache.hadoop.registry.client.types.Endpoint), ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap), ServiceRecord(org.apache.hadoop.registry.client.types.ServiceRecord)
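
Reading the record back mirrors Example 3: the host and port sit in the endpoint's address map under the same keys used above. A minimal sketch (variable names are illustrative):

// hypothetical read side for the record built above
for (Endpoint ep : application.external) {
    for (Map<String, String> address : ep.addresses) {
        System.out.println(ep.api + " -> "
            + address.get(JOYConstants.HOST) + ":" + address.get(JOYConstants.PORT));
    }
}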

Aggregations

Endpoint (org.apache.hadoop.registry.client.types.Endpoint): 7
URISyntaxException (java.net.URISyntaxException): 3
Map (java.util.Map): 3
ServiceRecord (org.apache.hadoop.registry.client.types.ServiceRecord): 3
IOException (java.io.IOException): 2
MalformedURLException (java.net.MalformedURLException): 2
ConcurrentHashMap (java.util.concurrent.ConcurrentHashMap): 2
InetSocketAddress (java.net.InetSocketAddress): 1
URI (java.net.URI): 1
URL (java.net.URL): 1
UnknownHostException (java.net.UnknownHostException): 1
HashMap (java.util.HashMap): 1
TreeMap (java.util.TreeMap): 1
CommandLine (org.apache.commons.cli.CommandLine): 1
CommandLineParser (org.apache.commons.cli.CommandLineParser): 1
GnuParser (org.apache.commons.cli.GnuParser): 1
Options (org.apache.commons.cli.Options): 1
ParseException (org.apache.commons.cli.ParseException): 1
PersistentEphemeralNode (org.apache.curator.framework.recipes.nodes.PersistentEphemeralNode): 1
PathNotFoundException (org.apache.hadoop.fs.PathNotFoundException): 1