Use of org.apache.thrift.protocol.TBinaryProtocol in project hive by apache: the class QueryPlan, method toBinaryString().
public String toBinaryString() throws IOException {
  org.apache.hadoop.hive.ql.plan.api.Query q = getQueryPlan();
  TMemoryBuffer tmb = new TMemoryBuffer(q.toString().length() * 5);
  TBinaryProtocol oprot = new TBinaryProtocol(tmb);
  try {
    q.write(oprot);
  } catch (TException e) {
    // TODO Auto-generated catch block
    e.printStackTrace();
    return q.toString();
  }
  byte[] buf = new byte[tmb.length()];
  tmb.read(buf, 0, tmb.length());
  return new String(buf);
  // return getQueryPlan().toString();
}
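Reading the plan back follows the same pattern in reverse: wrap the serialized bytes in an in-memory transport and let TBinaryProtocol drive the generated read() method. A minimal sketch, not part of the QueryPlan class above; it assumes you still hold the raw byte[] (round-tripping through the returned String is unreliable, since new String(buf) uses the platform default charset on binary data):

// Sketch: deserialize a Query that was serialized with TBinaryProtocol.
// Assumes 'buf' contains exactly the bytes produced by q.write(oprot) above.
import org.apache.thrift.TException;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.transport.TMemoryInputTransport;

public static org.apache.hadoop.hive.ql.plan.api.Query fromBinary(byte[] buf) throws TException {
  TMemoryInputTransport in = new TMemoryInputTransport(buf);
  org.apache.hadoop.hive.ql.plan.api.Query q = new org.apache.hadoop.hive.ql.plan.api.Query();
  q.read(new TBinaryProtocol(in));
  return q;
}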
Use of org.apache.thrift.protocol.TBinaryProtocol in project hbase by apache: the class TestThriftServerCmdLine, method talkToThriftServer().
private void talkToThriftServer() throws Exception {
  TSocket sock = new TSocket(InetAddress.getLocalHost().getHostName(), port);
  TTransport transport = sock;
  if (specifyFramed || implType.isAlwaysFramed) {
    transport = new TFramedTransport(transport);
  }
  sock.open();
  try {
    TProtocol prot;
    if (specifyCompact) {
      prot = new TCompactProtocol(transport);
    } else {
      prot = new TBinaryProtocol(transport);
    }
    Hbase.Client client = new Hbase.Client(prot);
    if (!tableCreated) {
      TestThriftServer.createTestTables(client);
      tableCreated = true;
    }
    TestThriftServer.checkTableList(client);
  } finally {
    sock.close();
  }
}
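The same setup works outside the test harness. A minimal standalone sketch; the host, port, and use of framing are assumptions and must match how the HBase ThriftServer was started (framed transport and compact protocol are server-side options, and client and server have to agree):

// Sketch: connect to an HBase thrift1 server with TFramedTransport + TBinaryProtocol.
import org.apache.hadoop.hbase.thrift.generated.Hbase;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.protocol.TProtocol;
import org.apache.thrift.transport.TFramedTransport;
import org.apache.thrift.transport.TSocket;
import org.apache.thrift.transport.TTransport;

public class ThriftClientSketch {
  public static void main(String[] args) throws Exception {
    TSocket sock = new TSocket("localhost", 9090);      // default thrift1 port; adjust as needed
    TTransport transport = new TFramedTransport(sock);  // only if the server runs framed
    transport.open();                                   // also opens the wrapped socket
    try {
      TProtocol prot = new TBinaryProtocol(transport);
      Hbase.Client client = new Hbase.Client(prot);
      System.out.println("tables visible to the client: " + client.getTableNames().size());
    } finally {
      transport.close();
    }
  }
}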
Use of org.apache.thrift.protocol.TBinaryProtocol in project hive by apache: the class RetryingThriftCLIServiceClient, method connect().
protected synchronized TTransport connect(HiveConf conf) throws HiveSQLException, TTransportException {
  if (transport != null && transport.isOpen()) {
    transport.close();
  }
  String host = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST);
  int port = conf.getIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_PORT);
  LOG.info("Connecting to " + host + ":" + port);
  transport = new TSocket(host, port);
  ((TSocket) transport).setTimeout((int) conf.getTimeVar(HiveConf.ConfVars.SERVER_READ_SOCKET_TIMEOUT, TimeUnit.SECONDS) * 1000);
  try {
    ((TSocket) transport).getSocket().setKeepAlive(conf.getBoolVar(HiveConf.ConfVars.SERVER_TCP_KEEP_ALIVE));
  } catch (SocketException e) {
    LOG.error("Error setting keep alive to " + conf.getBoolVar(HiveConf.ConfVars.SERVER_TCP_KEEP_ALIVE), e);
  }
  String userName = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_CLIENT_USER);
  String passwd = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_CLIENT_PASSWORD);
  try {
    transport = PlainSaslHelper.getPlainTransport(userName, passwd, transport);
  } catch (SaslException e) {
    LOG.error("Error creating plain SASL transport", e);
  }
  TProtocol protocol = new TBinaryProtocol(transport);
  transport.open();
  base = new ThriftCLIServiceClient(new TCLIService.Client(protocol), conf);
  LOG.info("Connected!");
  return transport;
}
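No teardown is shown here; a matching one is symmetric. A minimal sketch (the disconnect() method below is hypothetical, not part of RetryingThriftCLIServiceClient): closing the outer plain-SASL transport also closes the wrapped TSocket.

// Hypothetical counterpart to connect(): drop the client and close the transport.
protected synchronized void disconnect() {
  base = null;
  if (transport != null && transport.isOpen()) {
    transport.close();  // closes the SASL layer and the underlying socket
  }
  transport = null;
}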
Use of org.apache.thrift.protocol.TBinaryProtocol in project storm by apache: the class ThriftClient, method reconnect().
public synchronized void reconnect() {
  close();
  TSocket socket = null;
  try {
    socket = new TSocket(_host, _port);
    if (_timeout != null) {
      socket.setTimeout(_timeout);
    }
    //locate login configuration
    Configuration login_conf = AuthUtils.GetConfiguration(_conf);
    //construct a transport plugin
    ITransportPlugin transportPlugin = AuthUtils.GetTransportPlugin(_type, _conf, login_conf);
    //TODO get this from type instead of hardcoding to Nimbus.
    //establish client-server transport via plugin
    //do retries if the connect fails
    TBackoffConnect connectionRetry = new TBackoffConnect(Utils.getInt(_conf.get(Config.STORM_NIMBUS_RETRY_TIMES)), Utils.getInt(_conf.get(Config.STORM_NIMBUS_RETRY_INTERVAL)), Utils.getInt(_conf.get(Config.STORM_NIMBUS_RETRY_INTERVAL_CEILING)), _retryForever);
    _transport = connectionRetry.doConnectWithRetry(transportPlugin, socket, _host, _asUser);
  } catch (Exception ex) {
    // close the socket, which releases connection if it has created any.
    if (socket != null) {
      try {
        socket.close();
      } catch (Exception e) {
      }
    }
    throw new RuntimeException(ex);
  }
  _protocol = null;
  if (_transport != null) {
    _protocol = new TBinaryProtocol(_transport);
  }
}
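Once _protocol is in place, callers hand it to a generated service client; in Storm that is typically the Nimbus client. A minimal sketch, assuming the generated Nimbus class (its package, and whether the Thrift classes are shaded, depends on the Storm version in use):

// Sketch: build a Nimbus client on top of the reconnected transport/protocol.
// org.apache.storm.generated.Nimbus is an assumption; older releases used
// backtype.storm.generated.Nimbus, and newer ones shade the Thrift classes.
import org.apache.storm.generated.Nimbus;
import org.apache.thrift.protocol.TProtocol;

Nimbus.Client makeNimbusClient(TProtocol protocol) {
  return new Nimbus.Client(protocol);  // every RPC is encoded with TBinaryProtocol
}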
Use of org.apache.thrift.protocol.TBinaryProtocol in project hbase by apache: the class DemoClient, method run().
private void run() throws Exception {
  TTransport transport = new TSocket(host, port);
  if (secure) {
    Map<String, String> saslProperties = new HashMap<>();
    saslProperties.put(Sasl.QOP, "auth-conf,auth-int,auth");
    /**
     * The Thrift server the DemoClient is trying to connect to
     * must have a matching principal, and support authentication.
     *
     * The HBase cluster must be secure, allow proxy user.
     */
    transport = new TSaslClientTransport("GSSAPI", null, // Thrift server user name, should be an authorized proxy user.
        "hbase", // Thrift server domain
        host, saslProperties, null, transport);
  }
  transport.open();
  TProtocol protocol = new TBinaryProtocol(transport, true, true);
  Hbase.Client client = new Hbase.Client(protocol);
  byte[] t = bytes("demo_table");
  //
  // Scan all tables, look for the demo table and delete it.
  //
  System.out.println("scanning tables...");
  for (ByteBuffer name : client.getTableNames()) {
    System.out.println(" found: " + utf8(name.array()));
    if (utf8(name.array()).equals(utf8(t))) {
      if (client.isTableEnabled(name)) {
        System.out.println(" disabling table: " + utf8(name.array()));
        client.disableTable(name);
      }
      System.out.println(" deleting table: " + utf8(name.array()));
      client.deleteTable(name);
    }
  }
  //
  // Create the demo table with two column families, entry: and unused:
  //
  ArrayList<ColumnDescriptor> columns = new ArrayList<>(2);
  ColumnDescriptor col;
  col = new ColumnDescriptor();
  col.name = ByteBuffer.wrap(bytes("entry:"));
  col.timeToLive = Integer.MAX_VALUE;
  col.maxVersions = 10;
  columns.add(col);
  col = new ColumnDescriptor();
  col.name = ByteBuffer.wrap(bytes("unused:"));
  col.timeToLive = Integer.MAX_VALUE;
  columns.add(col);
  System.out.println("creating table: " + utf8(t));
  try {
    client.createTable(ByteBuffer.wrap(t), columns);
  } catch (AlreadyExists ae) {
    System.out.println("WARN: " + ae.message);
  }
  System.out.println("column families in " + utf8(t) + ": ");
  Map<ByteBuffer, ColumnDescriptor> columnMap = client.getColumnDescriptors(ByteBuffer.wrap(t));
  for (ColumnDescriptor col2 : columnMap.values()) {
    System.out.println(" column: " + utf8(col2.name.array()) + ", maxVer: " + Integer.toString(col2.maxVersions));
  }
  Map<ByteBuffer, ByteBuffer> dummyAttributes = null;
  boolean writeToWal = false;
  //
  // Test UTF-8 handling
  //
  byte[] invalid = { (byte) 'f', (byte) 'o', (byte) 'o', (byte) '-', (byte) 0xfc, (byte) 0xa1, (byte) 0xa1, (byte) 0xa1, (byte) 0xa1 };
  byte[] valid = { (byte) 'f', (byte) 'o', (byte) 'o', (byte) '-', (byte) 0xE7, (byte) 0x94, (byte) 0x9F, (byte) 0xE3, (byte) 0x83, (byte) 0x93, (byte) 0xE3, (byte) 0x83, (byte) 0xBC, (byte) 0xE3, (byte) 0x83, (byte) 0xAB };
  ArrayList<Mutation> mutations;
  // non-utf8 is fine for data
  mutations = new ArrayList<>(1);
  mutations.add(new Mutation(false, ByteBuffer.wrap(bytes("entry:foo")), ByteBuffer.wrap(invalid), writeToWal));
  client.mutateRow(ByteBuffer.wrap(t), ByteBuffer.wrap(bytes("foo")), mutations, dummyAttributes);
  // this row name is valid utf8
  mutations = new ArrayList<>(1);
  mutations.add(new Mutation(false, ByteBuffer.wrap(bytes("entry:foo")), ByteBuffer.wrap(valid), writeToWal));
  client.mutateRow(ByteBuffer.wrap(t), ByteBuffer.wrap(valid), mutations, dummyAttributes);
  // non-utf8 is now allowed in row names because HBase stores values as binary
  mutations = new ArrayList<>(1);
  mutations.add(new Mutation(false, ByteBuffer.wrap(bytes("entry:foo")), ByteBuffer.wrap(invalid), writeToWal));
  client.mutateRow(ByteBuffer.wrap(t), ByteBuffer.wrap(invalid), mutations, dummyAttributes);
  // Run a scanner on the rows we just created
  ArrayList<ByteBuffer> columnNames = new ArrayList<>();
  columnNames.add(ByteBuffer.wrap(bytes("entry:")));
  System.out.println("Starting scanner...");
  int scanner = client.scannerOpen(ByteBuffer.wrap(t), ByteBuffer.wrap(bytes("")), columnNames, dummyAttributes);
  while (true) {
    List<TRowResult> entry = client.scannerGet(scanner);
    if (entry.isEmpty()) {
      break;
    }
    printRow(entry);
  }
  //
  for (int i = 100; i >= 0; --i) {
    // format row keys as "00000" to "00100"
    NumberFormat nf = NumberFormat.getInstance();
    nf.setMinimumIntegerDigits(5);
    nf.setGroupingUsed(false);
    byte[] row = bytes(nf.format(i));
    mutations = new ArrayList<>(1);
    mutations.add(new Mutation(false, ByteBuffer.wrap(bytes("unused:")), ByteBuffer.wrap(bytes("DELETE_ME")), writeToWal));
    client.mutateRow(ByteBuffer.wrap(t), ByteBuffer.wrap(row), mutations, dummyAttributes);
    printRow(client.getRow(ByteBuffer.wrap(t), ByteBuffer.wrap(row), dummyAttributes));
    client.deleteAllRow(ByteBuffer.wrap(t), ByteBuffer.wrap(row), dummyAttributes);
    // sleep to force later timestamp
    try {
      Thread.sleep(50);
    } catch (InterruptedException e) {
      // no-op
    }
    mutations = new ArrayList<>(2);
    mutations.add(new Mutation(false, ByteBuffer.wrap(bytes("entry:num")), ByteBuffer.wrap(bytes("0")), writeToWal));
    mutations.add(new Mutation(false, ByteBuffer.wrap(bytes("entry:foo")), ByteBuffer.wrap(bytes("FOO")), writeToWal));
    client.mutateRow(ByteBuffer.wrap(t), ByteBuffer.wrap(row), mutations, dummyAttributes);
    printRow(client.getRow(ByteBuffer.wrap(t), ByteBuffer.wrap(row), dummyAttributes));
    Mutation m;
    mutations = new ArrayList<>(2);
    m = new Mutation();
    m.column = ByteBuffer.wrap(bytes("entry:foo"));
    m.isDelete = true;
    mutations.add(m);
    m = new Mutation();
    m.column = ByteBuffer.wrap(bytes("entry:num"));
    m.value = ByteBuffer.wrap(bytes("-1"));
    mutations.add(m);
    client.mutateRow(ByteBuffer.wrap(t), ByteBuffer.wrap(row), mutations, dummyAttributes);
    printRow(client.getRow(ByteBuffer.wrap(t), ByteBuffer.wrap(row), dummyAttributes));
    mutations = new ArrayList<>();
    mutations.add(new Mutation(false, ByteBuffer.wrap(bytes("entry:num")), ByteBuffer.wrap(bytes(Integer.toString(i))), writeToWal));
    mutations.add(new Mutation(false, ByteBuffer.wrap(bytes("entry:sqr")), ByteBuffer.wrap(bytes(Integer.toString(i * i))), writeToWal));
    client.mutateRow(ByteBuffer.wrap(t), ByteBuffer.wrap(row), mutations, dummyAttributes);
    printRow(client.getRow(ByteBuffer.wrap(t), ByteBuffer.wrap(row), dummyAttributes));
    // sleep to force later timestamp
    try {
      Thread.sleep(50);
    } catch (InterruptedException e) {
      // no-op
    }
    mutations.clear();
    m = new Mutation();
    m.column = ByteBuffer.wrap(bytes("entry:num"));
    m.value = ByteBuffer.wrap(bytes("-999"));
    mutations.add(m);
    m = new Mutation();
    m.column = ByteBuffer.wrap(bytes("entry:sqr"));
    m.isDelete = true;
    // shouldn't override latest
    client.mutateRowTs(ByteBuffer.wrap(t), ByteBuffer.wrap(row), mutations, 1, dummyAttributes);
    printRow(client.getRow(ByteBuffer.wrap(t), ByteBuffer.wrap(row), dummyAttributes));
    List<TCell> versions = client.getVer(ByteBuffer.wrap(t), ByteBuffer.wrap(row), ByteBuffer.wrap(bytes("entry:num")), 10, dummyAttributes);
    printVersions(ByteBuffer.wrap(row), versions);
    if (versions.isEmpty()) {
      System.out.println("FATAL: wrong # of versions");
      System.exit(-1);
    }
    List<TCell> result = client.get(ByteBuffer.wrap(t), ByteBuffer.wrap(row), ByteBuffer.wrap(bytes("entry:foo")), dummyAttributes);
    if (!result.isEmpty()) {
      System.out.println("FATAL: shouldn't get here");
      System.exit(-1);
    }
    System.out.println("");
  }
  // scan all rows/columnNames
  columnNames.clear();
  for (ColumnDescriptor col2 : client.getColumnDescriptors(ByteBuffer.wrap(t)).values()) {
    System.out.println("column with name: " + new String(col2.name.array()));
    System.out.println(col2.toString());
    columnNames.add(col2.name);
  }
  System.out.println("Starting scanner...");
  scanner = client.scannerOpenWithStop(ByteBuffer.wrap(t), ByteBuffer.wrap(bytes("00020")), ByteBuffer.wrap(bytes("00040")), columnNames, dummyAttributes);
  while (true) {
    List<TRowResult> entry = client.scannerGet(scanner);
    if (entry.isEmpty()) {
      System.out.println("Scanner finished");
      break;
    }
    printRow(entry);
  }
  transport.close();
}
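One detail worth noting from the example above: TBinaryProtocol(transport, true, true) turns on strict read and strict write, i.e. versioned message headers. The same switches exist on the protocol factory, which is the form Thrift servers and connection pools usually take. A minimal sketch:

// TBinaryProtocol.Factory(strictRead, strictWrite) produces one protocol per transport.
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.protocol.TProtocol;
import org.apache.thrift.protocol.TProtocolFactory;
import org.apache.thrift.transport.TTransport;

TProtocol strictBinaryProtocol(TTransport transport) {
  TProtocolFactory factory = new TBinaryProtocol.Factory(true, true);
  return factory.getProtocol(transport);
}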