
Example 1 with TProtocol

Use of org.apache.thrift.protocol.TProtocol in project hbase by apache.

From the class HttpDoAsClient, method run:

private void run() throws Exception {
    // Note: this TSocket is opened here and closed at the end, but the RPC calls below
    // all go through the THttpClient transport created next.
    TTransport transport = new TSocket(host, port);
    transport.open();
    String url = "http://" + host + ":" + port;
    THttpClient httpClient = new THttpClient(url);
    httpClient.open();
    TProtocol protocol = new TBinaryProtocol(httpClient);
    Hbase.Client client = new Hbase.Client(protocol);
    byte[] t = bytes("demo_table");
    //
    // Scan all tables, look for the demo table and delete it.
    //
    System.out.println("scanning tables...");
    for (ByteBuffer name : refresh(client, httpClient).getTableNames()) {
        System.out.println("  found: " + utf8(name.array()));
        if (utf8(name.array()).equals(utf8(t))) {
            if (refresh(client, httpClient).isTableEnabled(name)) {
                System.out.println("    disabling table: " + utf8(name.array()));
                refresh(client, httpClient).disableTable(name);
            }
            System.out.println("    deleting table: " + utf8(name.array()));
            refresh(client, httpClient).deleteTable(name);
        }
    }
    //
    // Create the demo table with two column families, entry: and unused:
    //
    ArrayList<ColumnDescriptor> columns = new ArrayList<>(2);
    ColumnDescriptor col;
    col = new ColumnDescriptor();
    col.name = ByteBuffer.wrap(bytes("entry:"));
    col.timeToLive = Integer.MAX_VALUE;
    col.maxVersions = 10;
    columns.add(col);
    col = new ColumnDescriptor();
    col.name = ByteBuffer.wrap(bytes("unused:"));
    col.timeToLive = Integer.MAX_VALUE;
    columns.add(col);
    System.out.println("creating table: " + utf8(t));
    try {
        refresh(client, httpClient).createTable(ByteBuffer.wrap(t), columns);
    } catch (AlreadyExists ae) {
        System.out.println("WARN: " + ae.message);
    }
    System.out.println("column families in " + utf8(t) + ": ");
    Map<ByteBuffer, ColumnDescriptor> columnMap = refresh(client, httpClient).getColumnDescriptors(ByteBuffer.wrap(t));
    for (ColumnDescriptor col2 : columnMap.values()) {
        System.out.println("  column: " + utf8(col2.name.array()) + ", maxVer: " + Integer.toString(col2.maxVersions));
    }
    transport.close();
    httpClient.close();
}
Also used: ColumnDescriptor (org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor), ArrayList (java.util.ArrayList), THttpClient (org.apache.thrift.transport.THttpClient), ByteBuffer (java.nio.ByteBuffer), TBinaryProtocol (org.apache.thrift.protocol.TBinaryProtocol), TProtocol (org.apache.thrift.protocol.TProtocol), TTransport (org.apache.thrift.transport.TTransport), AlreadyExists (org.apache.hadoop.hbase.thrift.generated.AlreadyExists), Hbase (org.apache.hadoop.hbase.thrift.generated.Hbase), TSocket (org.apache.thrift.transport.TSocket)
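
The essential wiring above is THttpClient -> TBinaryProtocol -> Hbase.Client. A minimal sketch of just that pattern, not taken from the hbase sources (thriftUrl and listTables are placeholder names, and the generated Hbase classes are assumed to be on the classpath):

static void listTables(String thriftUrl) throws Exception {
    // Placeholder endpoint, e.g. "http://localhost:9090" where the HBase Thrift server listens over HTTP.
    THttpClient httpClient = new THttpClient(thriftUrl);
    httpClient.open();
    try {
        TProtocol protocol = new TBinaryProtocol(httpClient);
        Hbase.Client client = new Hbase.Client(protocol);
        for (ByteBuffer name : client.getTableNames()) {
            System.out.println(new String(name.array(), java.nio.charset.StandardCharsets.UTF_8));
        }
    } finally {
        httpClient.close();
    }
}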

Example 2 with TProtocol

Use of org.apache.thrift.protocol.TProtocol in project hive by apache.

From the class ThriftBinaryCLIService, method run:

@Override
public void run() {
    try {
        // Server thread pool
        String threadPoolName = "HiveServer2-Handler-Pool";
        ExecutorService executorService = new ThreadPoolExecutorWithOomHook(minWorkerThreads, maxWorkerThreads, workerKeepAliveTime, TimeUnit.SECONDS, new SynchronousQueue<Runnable>(), new ThreadFactoryWithGarbageCleanup(threadPoolName), oomHook);
        // Thrift configs
        hiveAuthFactory = new HiveAuthFactory(hiveConf);
        TTransportFactory transportFactory = hiveAuthFactory.getAuthTransFactory();
        TProcessorFactory processorFactory = hiveAuthFactory.getAuthProcFactory(this);
        TServerSocket serverSocket = null;
        List<String> sslVersionBlacklist = new ArrayList<String>();
        for (String sslVersion : hiveConf.getVar(ConfVars.HIVE_SSL_PROTOCOL_BLACKLIST).split(",")) {
            sslVersionBlacklist.add(sslVersion);
        }
        if (!hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_USE_SSL)) {
            serverSocket = HiveAuthUtils.getServerSocket(hiveHost, portNum);
        } else {
            String keyStorePath = hiveConf.getVar(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PATH).trim();
            if (keyStorePath.isEmpty()) {
                throw new IllegalArgumentException(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PATH.varname + " Not configured for SSL connection");
            }
            String keyStorePassword = ShimLoader.getHadoopShims().getPassword(hiveConf, HiveConf.ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PASSWORD.varname);
            serverSocket = HiveAuthUtils.getServerSSLSocket(hiveHost, portNum, keyStorePath, keyStorePassword, sslVersionBlacklist);
        }
        // Server args
        int maxMessageSize = hiveConf.getIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_MAX_MESSAGE_SIZE);
        int requestTimeout = (int) hiveConf.getTimeVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_LOGIN_TIMEOUT, TimeUnit.SECONDS);
        int beBackoffSlotLength = (int) hiveConf.getTimeVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_LOGIN_BEBACKOFF_SLOT_LENGTH, TimeUnit.MILLISECONDS);
        TThreadPoolServer.Args sargs = new TThreadPoolServer.Args(serverSocket).processorFactory(processorFactory).transportFactory(transportFactory).protocolFactory(new TBinaryProtocol.Factory()).inputProtocolFactory(new TBinaryProtocol.Factory(true, true, maxMessageSize, maxMessageSize)).requestTimeout(requestTimeout).requestTimeoutUnit(TimeUnit.SECONDS).beBackoffSlotLength(beBackoffSlotLength).beBackoffSlotLengthUnit(TimeUnit.MILLISECONDS).executorService(executorService);
        // TCP Server
        server = new TThreadPoolServer(sargs);
        server.setServerEventHandler(new TServerEventHandler() {

            @Override
            public ServerContext createContext(TProtocol input, TProtocol output) {
                Metrics metrics = MetricsFactory.getInstance();
                if (metrics != null) {
                    try {
                        metrics.incrementCounter(MetricsConstant.OPEN_CONNECTIONS);
                        metrics.incrementCounter(MetricsConstant.CUMULATIVE_CONNECTION_COUNT);
                    } catch (Exception e) {
                        LOG.warn("Error Reporting JDO operation to Metrics system", e);
                    }
                }
                return new ThriftCLIServerContext();
            }

            @Override
            public void deleteContext(ServerContext serverContext, TProtocol input, TProtocol output) {
                Metrics metrics = MetricsFactory.getInstance();
                if (metrics != null) {
                    try {
                        metrics.decrementCounter(MetricsConstant.OPEN_CONNECTIONS);
                    } catch (Exception e) {
                        LOG.warn("Error Reporting JDO operation to Metrics system", e);
                    }
                }
                ThriftCLIServerContext context = (ThriftCLIServerContext) serverContext;
                SessionHandle sessionHandle = context.getSessionHandle();
                if (sessionHandle != null) {
                    LOG.info("Session disconnected without closing properly. ");
                    try {
                        boolean close = cliService.getSessionManager().getSession(sessionHandle).getHiveConf().getBoolVar(ConfVars.HIVE_SERVER2_CLOSE_SESSION_ON_DISCONNECT);
                        LOG.info((close ? "" : "Not ") + "Closing the session: " + sessionHandle);
                        if (close) {
                            cliService.closeSession(sessionHandle);
                        }
                    } catch (HiveSQLException e) {
                        LOG.warn("Failed to close session: " + e, e);
                    }
                }
            }

            @Override
            public void preServe() {
            }

            @Override
            public void processContext(ServerContext serverContext, TTransport input, TTransport output) {
                currentServerContext.set(serverContext);
            }
        });
        String msg = "Starting " + ThriftBinaryCLIService.class.getSimpleName() + " on port " + portNum + " with " + minWorkerThreads + "..." + maxWorkerThreads + " worker threads";
        LOG.info(msg);
        server.serve();
    } catch (Throwable t) {
        LOG.error("Error starting HiveServer2: could not start " + ThriftBinaryCLIService.class.getSimpleName(), t);
        System.exit(-1);
    }
}
Also used: ThreadFactoryWithGarbageCleanup (org.apache.hive.service.server.ThreadFactoryWithGarbageCleanup), TServerEventHandler (org.apache.thrift.server.TServerEventHandler), ArrayList (java.util.ArrayList), HiveAuthFactory (org.apache.hive.service.auth.HiveAuthFactory), TProcessorFactory (org.apache.thrift.TProcessorFactory), MetricsFactory (org.apache.hadoop.hive.common.metrics.common.MetricsFactory), TTransportFactory (org.apache.thrift.transport.TTransportFactory), TServerSocket (org.apache.thrift.transport.TServerSocket), Metrics (org.apache.hadoop.hive.common.metrics.common.Metrics), TProtocol (org.apache.thrift.protocol.TProtocol), HiveSQLException (org.apache.hive.service.cli.HiveSQLException), SessionHandle (org.apache.hive.service.cli.SessionHandle), TBinaryProtocol (org.apache.thrift.protocol.TBinaryProtocol), ServerContext (org.apache.thrift.server.ServerContext), ExecutorService (java.util.concurrent.ExecutorService), TTransport (org.apache.thrift.transport.TTransport), TThreadPoolServer (org.apache.thrift.server.TThreadPoolServer)
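
Stripped of the Hive-specific authentication, SSL, and metrics plumbing, this is the standard TThreadPoolServer wiring. A minimal sketch under the assumption of a Thrift-generated service (MyService, MyServiceHandler, and port 9090 are placeholders):

static void startServer() throws Exception {
    // Blocking socket transport plus a thread-pool server, one worker thread per connection.
    TServerSocket serverSocket = new TServerSocket(9090);
    TProcessor processor = new MyService.Processor<>(new MyServiceHandler());
    TThreadPoolServer.Args args = new TThreadPoolServer.Args(serverSocket)
        .processor(processor)
        .transportFactory(new TTransportFactory())
        .protocolFactory(new TBinaryProtocol.Factory());
    TThreadPoolServer server = new TThreadPoolServer(args);
    // serve() blocks the calling thread; call server.stop() from another thread to shut down.
    server.serve();
}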

Example 3 with TProtocol

Use of org.apache.thrift.protocol.TProtocol in project hbase by apache.

From the class ThriftServerRunner, method setupServer:

/**
   * Setting up the thrift TServer
   */
private void setupServer() throws Exception {
    // Construct correct ProtocolFactory
    TProtocolFactory protocolFactory;
    if (conf.getBoolean(COMPACT_CONF_KEY, false)) {
        LOG.debug("Using compact protocol");
        protocolFactory = new TCompactProtocol.Factory();
    } else {
        LOG.debug("Using binary protocol");
        protocolFactory = new TBinaryProtocol.Factory();
    }
    final TProcessor p = new Hbase.Processor<>(handler);
    ImplType implType = ImplType.getServerImpl(conf);
    TProcessor processor = p;
    // Construct correct TransportFactory
    TTransportFactory transportFactory;
    if (conf.getBoolean(FRAMED_CONF_KEY, false) || implType.isAlwaysFramed) {
        if (qop != null) {
            throw new RuntimeException("Thrift server authentication" + " doesn't work with framed transport yet");
        }
        transportFactory = new TFramedTransport.Factory(conf.getInt(MAX_FRAME_SIZE_CONF_KEY, 2) * 1024 * 1024);
        LOG.debug("Using framed transport");
    } else if (qop == null) {
        transportFactory = new TTransportFactory();
    } else {
        // Extract the name from the principal
        String name = SecurityUtil.getUserFromPrincipal(conf.get("hbase.thrift.kerberos.principal"));
        Map<String, String> saslProperties = new HashMap<>();
        saslProperties.put(Sasl.QOP, qop);
        TSaslServerTransport.Factory saslFactory = new TSaslServerTransport.Factory();
        saslFactory.addServerDefinition("GSSAPI", name, host, saslProperties, new SaslGssCallbackHandler() {

            @Override
            public void handle(Callback[] callbacks) throws UnsupportedCallbackException {
                AuthorizeCallback ac = null;
                for (Callback callback : callbacks) {
                    if (callback instanceof AuthorizeCallback) {
                        ac = (AuthorizeCallback) callback;
                    } else {
                        throw new UnsupportedCallbackException(callback, "Unrecognized SASL GSSAPI Callback");
                    }
                }
                if (ac != null) {
                    String authid = ac.getAuthenticationID();
                    String authzid = ac.getAuthorizationID();
                    if (!authid.equals(authzid)) {
                        ac.setAuthorized(false);
                    } else {
                        ac.setAuthorized(true);
                        String userName = SecurityUtil.getUserFromPrincipal(authzid);
                        LOG.info("Effective user: " + userName);
                        ac.setAuthorizedID(userName);
                    }
                }
            }
        });
        transportFactory = saslFactory;
        // Create a processor wrapper, to get the caller
        processor = new TProcessor() {

            @Override
            public boolean process(TProtocol inProt, TProtocol outProt) throws TException {
                TSaslServerTransport saslServerTransport = (TSaslServerTransport) inProt.getTransport();
                SaslServer saslServer = saslServerTransport.getSaslServer();
                String principal = saslServer.getAuthorizationID();
                hbaseHandler.setEffectiveUser(principal);
                return p.process(inProt, outProt);
            }
        };
    }
    if (conf.get(BIND_CONF_KEY) != null && !implType.canSpecifyBindIP) {
        LOG.error("Server types " + Joiner.on(", ").join(ImplType.serversThatCannotSpecifyBindIP()) + " don't support IP " + "address binding at the moment. See " + "https://issues.apache.org/jira/browse/HBASE-2155 for details.");
        throw new RuntimeException("-" + BIND_CONF_KEY + " not supported with " + implType);
    }
    // Thrift's implementation uses '0' as a placeholder for 'use the default.'
    int backlog = conf.getInt(BACKLOG_CONF_KEY, 0);
    if (implType == ImplType.HS_HA || implType == ImplType.NONBLOCKING || implType == ImplType.THREADED_SELECTOR) {
        InetAddress listenAddress = getBindAddress(conf);
        TNonblockingServerTransport serverTransport = new TNonblockingServerSocket(new InetSocketAddress(listenAddress, listenPort));
        if (implType == ImplType.NONBLOCKING) {
            TNonblockingServer.Args serverArgs = new TNonblockingServer.Args(serverTransport);
            serverArgs.processor(processor).transportFactory(transportFactory).protocolFactory(protocolFactory);
            tserver = new TNonblockingServer(serverArgs);
        } else if (implType == ImplType.HS_HA) {
            THsHaServer.Args serverArgs = new THsHaServer.Args(serverTransport);
            CallQueue callQueue = new CallQueue(new LinkedBlockingQueue<>(), metrics);
            ExecutorService executorService = createExecutor(callQueue, serverArgs.getMaxWorkerThreads(), serverArgs.getMaxWorkerThreads());
            serverArgs.executorService(executorService).processor(processor).transportFactory(transportFactory).protocolFactory(protocolFactory);
            tserver = new THsHaServer(serverArgs);
        } else {
            // THREADED_SELECTOR
            TThreadedSelectorServer.Args serverArgs = new HThreadedSelectorServerArgs(serverTransport, conf);
            CallQueue callQueue = new CallQueue(new LinkedBlockingQueue<>(), metrics);
            ExecutorService executorService = createExecutor(callQueue, serverArgs.getWorkerThreads(), serverArgs.getWorkerThreads());
            serverArgs.executorService(executorService).processor(processor).transportFactory(transportFactory).protocolFactory(protocolFactory);
            tserver = new TThreadedSelectorServer(serverArgs);
        }
        LOG.info("starting HBase " + implType.simpleClassName() + " server on " + Integer.toString(listenPort));
    } else if (implType == ImplType.THREAD_POOL) {
        // Thread pool server. Get the IP address to bind to.
        InetAddress listenAddress = getBindAddress(conf);
        int readTimeout = conf.getInt(THRIFT_SERVER_SOCKET_READ_TIMEOUT_KEY, THRIFT_SERVER_SOCKET_READ_TIMEOUT_DEFAULT);
        TServerTransport serverTransport = new TServerSocket(new TServerSocket.ServerSocketTransportArgs().bindAddr(new InetSocketAddress(listenAddress, listenPort)).backlog(backlog).clientTimeout(readTimeout));
        TBoundedThreadPoolServer.Args serverArgs = new TBoundedThreadPoolServer.Args(serverTransport, conf);
        serverArgs.processor(processor).transportFactory(transportFactory).protocolFactory(protocolFactory);
        LOG.info("starting " + ImplType.THREAD_POOL.simpleClassName() + " on " + listenAddress + ":" + Integer.toString(listenPort) + " with readTimeout " + readTimeout + "ms; " + serverArgs);
        TBoundedThreadPoolServer tserver = new TBoundedThreadPoolServer(serverArgs, metrics);
        this.tserver = tserver;
    } else {
        throw new AssertionError("Unsupported Thrift server implementation: " + implType.simpleClassName());
    }
    // A sanity check that we instantiated the right type of server.
    if (tserver.getClass() != implType.serverClass) {
        throw new AssertionError("Expected to create Thrift server class " + implType.serverClass.getName() + " but got " + tserver.getClass().getName());
    }
    registerFilters(conf);
}
Also used: TProtocolFactory (org.apache.thrift.protocol.TProtocolFactory), TNonblockingServerTransport (org.apache.thrift.transport.TNonblockingServerTransport), TProcessor (org.apache.thrift.TProcessor), SaslServer (javax.security.sasl.SaslServer), InetSocketAddress (java.net.InetSocketAddress), TThreadedSelectorServer (org.apache.thrift.server.TThreadedSelectorServer), LogFactory (org.apache.commons.logging.LogFactory), TTransportFactory (org.apache.thrift.transport.TTransportFactory), SslContextFactory (org.eclipse.jetty.util.ssl.SslContextFactory), TCompactProtocol (org.apache.thrift.protocol.TCompactProtocol), LinkedBlockingQueue (java.util.concurrent.LinkedBlockingQueue), AuthorizeCallback (javax.security.sasl.AuthorizeCallback), TServerSocket (org.apache.thrift.transport.TServerSocket), THsHaServer (org.apache.thrift.server.THsHaServer), TProtocol (org.apache.thrift.protocol.TProtocol), TFramedTransport (org.apache.thrift.transport.TFramedTransport), UnsupportedCallbackException (javax.security.auth.callback.UnsupportedCallbackException), TNonblockingServer (org.apache.thrift.server.TNonblockingServer), TServerTransport (org.apache.thrift.transport.TServerTransport), TSaslServerTransport (org.apache.thrift.transport.TSaslServerTransport), SaslGssCallbackHandler (org.apache.hadoop.security.SaslRpcServer.SaslGssCallbackHandler), Callback (javax.security.auth.callback.Callback), TBinaryProtocol (org.apache.thrift.protocol.TBinaryProtocol), TNonblockingServerSocket (org.apache.thrift.transport.TNonblockingServerSocket), ExecutorService (java.util.concurrent.ExecutorService), Map (java.util.Map), TreeMap (java.util.TreeMap), HashMap (java.util.HashMap), InetAddress (java.net.InetAddress)
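
For the NONBLOCKING, HS_HA, and THREADED_SELECTOR implementations above, the server applies framed transport internally, so clients must wrap their sockets in TFramedTransport. A minimal sketch of the nonblocking case, not taken from the hbase sources (MyService, MyServiceHandler, and port 9091 are placeholders):

static void startNonblockingServer() throws Exception {
    // Single-threaded, selector-based server over a nonblocking server socket.
    TNonblockingServerSocket serverTransport = new TNonblockingServerSocket(9091);
    TNonblockingServer.Args args = new TNonblockingServer.Args(serverTransport)
        .processor(new MyService.Processor<>(new MyServiceHandler()))
        .protocolFactory(new TCompactProtocol.Factory());
    // Clients of this server must wrap their TSocket in a TFramedTransport.
    new TNonblockingServer(args).serve();
}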

Example 4 with TProtocol

Use of org.apache.thrift.protocol.TProtocol in project hbase by apache.

From the class TestThriftHttpServer, method talkToThriftServer:

private void talkToThriftServer(int customHeaderSize) throws Exception {
    THttpClient httpClient = new THttpClient("http://" + HConstants.LOCALHOST + ":" + port);
    httpClient.open();
    if (customHeaderSize > 0) {
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < customHeaderSize; i++) {
            sb.append("a");
        }
        httpClient.setCustomHeader("User-Agent", sb.toString());
    }
    try {
        TProtocol prot;
        prot = new TBinaryProtocol(httpClient);
        Hbase.Client client = new Hbase.Client(prot);
        if (!tableCreated) {
            TestThriftServer.createTestTables(client);
            tableCreated = true;
        }
        TestThriftServer.checkTableList(client);
    } finally {
        httpClient.close();
    }
}
Also used: TBinaryProtocol (org.apache.thrift.protocol.TBinaryProtocol), TProtocol (org.apache.thrift.protocol.TProtocol), THttpClient (org.apache.thrift.transport.THttpClient), Hbase (org.apache.hadoop.hbase.thrift.generated.Hbase)
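
In the libthrift versions these projects build against, THttpClient can also be constructed with an org.apache.http.client.HttpClient, which lets a test that issues many requests reuse pooled connections; custom headers are set the same way. A hedged sketch, not from the hbase sources (the endpoint URL is a placeholder, and org.apache.http:httpclient is assumed to be on the classpath):

static void talkOverPooledHttp() throws Exception {
    // HttpClients.createDefault() returns a pooling HTTP client (assumption: Apache HttpClient is available).
    org.apache.http.client.HttpClient pooled = org.apache.http.impl.client.HttpClients.createDefault();
    THttpClient httpClient = new THttpClient("http://localhost:9095", pooled);
    httpClient.setCustomHeader("User-Agent", "thrift-http-test");
    httpClient.open();
    try {
        Hbase.Client client = new Hbase.Client(new TBinaryProtocol(httpClient));
        client.getTableNames();
    } finally {
        httpClient.close();
    }
}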

Example 5 with TProtocol

Use of org.apache.thrift.protocol.TProtocol in project hive by apache.

From the class HBaseReadWrite, method dumpThriftObject:

private String dumpThriftObject(TBase obj) throws TException, UnsupportedEncodingException {
    TMemoryBuffer buf = new TMemoryBuffer(1000);
    TProtocol protocol = new TSimpleJSONProtocol(buf);
    obj.write(protocol);
    return buf.toString("UTF-8");
}
Also used: TSimpleJSONProtocol (org.apache.thrift.protocol.TSimpleJSONProtocol), TMemoryBuffer (org.apache.thrift.transport.TMemoryBuffer), TProtocol (org.apache.thrift.protocol.TProtocol)
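
TSimpleJSONProtocol is write-only: it produces a human-readable dump but cannot be parsed back into a Thrift object, which is why it is only used for logging here. For a round trip, TSerializer and TDeserializer with a binary (or compact) protocol factory can be used instead; a minimal sketch, not from the hive sources (MyStruct is a placeholder for any generated TBase type):

static byte[] roundTrip(MyStruct original) throws TException {
    // Serialize with the binary protocol ...
    TSerializer serializer = new TSerializer(new TBinaryProtocol.Factory());
    byte[] bytes = serializer.serialize(original);
    // ... and read the bytes back into a fresh instance of the same struct.
    MyStruct copy = new MyStruct();
    new TDeserializer(new TBinaryProtocol.Factory()).deserialize(copy, bytes);
    return bytes;
}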

Aggregations

TProtocol (org.apache.thrift.protocol.TProtocol): 78 usages
TBinaryProtocol (org.apache.thrift.protocol.TBinaryProtocol): 29 usages
TTransport (org.apache.thrift.transport.TTransport): 28 usages
TSocket (org.apache.thrift.transport.TSocket): 23 usages
TException (org.apache.thrift.TException): 22 usages
TFramedTransport (org.apache.thrift.transport.TFramedTransport): 17 usages
IOException (java.io.IOException): 15 usages
TCompactProtocol (org.apache.thrift.protocol.TCompactProtocol): 10 usages
TIOStreamTransport (org.apache.thrift.transport.TIOStreamTransport): 10 usages
THttpClient (org.apache.thrift.transport.THttpClient): 9 usages
TTransportException (org.apache.thrift.transport.TTransportException): 8 usages
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 7 usages
TJSONProtocol (org.apache.thrift.protocol.TJSONProtocol): 7 usages
RDF_Term (org.apache.jena.riot.thrift.wire.RDF_Term): 6 usages
InputStream (java.io.InputStream): 5 usages
ArrayList (java.util.ArrayList): 5 usages
TProcessor (org.apache.thrift.TProcessor): 5 usages
Hbase (org.apache.hadoop.hbase.thrift.generated.Hbase): 4 usages
TTransportFactory (org.apache.thrift.transport.TTransportFactory): 4 usages
OutputStream (java.io.OutputStream): 3 usages
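
As the counts suggest, the most common client-side combination across these usages is a TSocket, often wrapped in a TFramedTransport, feeding a TBinaryProtocol. A minimal sketch of that pattern (MyService, host, and port are placeholders):

static void callOverFramedSocket(String host, int port) throws Exception {
    // Framed transport is required when the server side is one of the nonblocking Thrift servers.
    TTransport transport = new TFramedTransport(new TSocket(host, port));
    transport.open();
    try {
        TProtocol protocol = new TBinaryProtocol(transport);
        MyService.Client client = new MyService.Client(protocol);
        // ... issue RPC calls on client here ...
    } finally {
        transport.close();
    }
}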