Use of org.apache.thrift.transport.TTransport in project storm by apache.
The class KerberosSaslTransportPlugin, method connect.
@Override
public TTransport connect(TTransport transport, String serverHost, String asUser) throws TTransportException, IOException {
    // create an authentication callback handler
    ClientCallbackHandler client_callback_handler = new ClientCallbackHandler(login_conf);
    // login our user
    LoginCacheKey key = new LoginCacheKey(login_conf, AuthUtils.LOGIN_CONTEXT_CLIENT);
    Login login = loginCache.get(key);
    if (login == null) {
        LOG.debug("Kerberos Login was not found in the Login Cache, attempting to contact the Kerberos Server");
        synchronized (loginCache) {
            login = loginCache.get(key);
            if (login == null) {
                try {
                    // specify a configuration object to be used
                    Configuration.setConfiguration(login_conf);
                    // now login
                    login = new Login(AuthUtils.LOGIN_CONTEXT_CLIENT, client_callback_handler);
                    login.startThreadIfNeeded();
                    loginCache.put(key, login);
                } catch (LoginException ex) {
                    LOG.error("Client failed to login with principal: " + ex, ex);
                    throw new RuntimeException(ex);
                }
            }
        }
    }
    final Subject subject = login.getSubject();
    if (subject.getPrivateCredentials(KerberosTicket.class).isEmpty()) {
        // error
        throw new RuntimeException("Failed to verify user principal with section \"" + AuthUtils.LOGIN_CONTEXT_CLIENT + "\" in login configuration file " + login_conf);
    }
    final String principal = StringUtils.isBlank(asUser) ? getPrincipal(subject) : asUser;
    String serviceName = AuthUtils.get(login_conf, AuthUtils.LOGIN_CONTEXT_CLIENT, "serviceName");
    if (serviceName == null) {
        serviceName = AuthUtils.SERVICE;
    }
    Map<String, String> props = new TreeMap<String, String>();
    props.put(Sasl.QOP, "auth");
    props.put(Sasl.SERVER_AUTH, "false");
    LOG.debug("SASL GSSAPI client transport is being established");
    final TTransport saslTransport = new TSaslClientTransport(KERBEROS, principal, serviceName, serverHost, props, null, transport);
    // open the SASL transport with the login credential
    try {
        Subject.doAs(subject, new PrivilegedExceptionAction<Void>() {
            public Void run() {
                try {
                    LOG.debug("do as: " + principal);
                    saslTransport.open();
                } catch (Exception e) {
                    LOG.error("Client failed to open SaslClientTransport to interact with a server during session initiation: " + e, e);
                }
                return null;
            }
        });
    } catch (PrivilegedActionException e) {
        throw new RuntimeException(e);
    }
    return saslTransport;
}
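For context, a minimal caller-side sketch of how a transport plugin like this might be used. The host, port, and the omitted prepare()/configuration wiring are assumptions, not part of the snippet above.

// Hypothetical usage sketch; host, port, and configuration wiring are assumptions.
TTransport underlying = new TSocket("nimbus.example.com", 6627);
KerberosSaslTransportPlugin plugin = new KerberosSaslTransportPlugin();
// plugin.prepare(...) would normally supply the Storm and JAAS configuration; omitted here.
TTransport saslTransport = plugin.connect(underlying, "nimbus.example.com", null);
TProtocol protocol = new TBinaryProtocol(saslTransport); // a generated Thrift client is built on top of this
// ... make Thrift calls, then close ...
saslTransport.close();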
Use of org.apache.thrift.transport.TTransport in project storm by apache.
The class SimpleTransportPlugin, method connect.
/**
 * Connect to the specified server via framed transport.
 *
 * @param transport  The underlying Thrift transport.
 * @param serverHost unused.
 * @param asUser     unused.
 */
@Override
public TTransport connect(TTransport transport, String serverHost, String asUser) throws TTransportException {
    int maxBufferSize = type.getMaxBufferSize(storm_conf);
    // create a framed transport
    TTransport conn = new TFramedTransport(transport, maxBufferSize);
    // connect
    conn.open();
    LOG.debug("Simple client transport has been established");
    return conn;
}
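For comparison, the same framed setup expressed directly against libthrift; the host, port, and buffer size below are assumptions.

// Plain-Thrift sketch of what connect() does above; the concrete values are assumptions.
TTransport socket = new TSocket("nimbus.example.com", 6627);
TTransport framed = new TFramedTransport(socket, 1024 * 1024); // max frame size, e.g. taken from the storm config
framed.open(); // also opens the underlying socket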
Use of org.apache.thrift.transport.TTransport in project hbase by apache.
The class DemoClient (thrift), method run.
private void run() throws Exception {
    TTransport transport = new TSocket(host, port);
    if (secure) {
        Map<String, String> saslProperties = new HashMap<>();
        saslProperties.put(Sasl.QOP, "auth-conf,auth-int,auth");
        /**
         * The Thrift server the DemoClient is trying to connect to
         * must have a matching principal, and support authentication.
         *
         * The HBase cluster must be secure and allow proxy users.
         */
        transport = new TSaslClientTransport("GSSAPI", null,
            "hbase",    // Thrift server user name, should be an authorized proxy user.
            host,       // Thrift server domain
            saslProperties, null, transport);
    }
    transport.open();
    TProtocol protocol = new TBinaryProtocol(transport, true, true);
    Hbase.Client client = new Hbase.Client(protocol);
    byte[] t = bytes("demo_table");
    //
    // Scan all tables, look for the demo table and delete it.
    //
    System.out.println("scanning tables...");
    for (ByteBuffer name : client.getTableNames()) {
        System.out.println(" found: " + utf8(name.array()));
        if (utf8(name.array()).equals(utf8(t))) {
            if (client.isTableEnabled(name)) {
                System.out.println(" disabling table: " + utf8(name.array()));
                client.disableTable(name);
            }
            System.out.println(" deleting table: " + utf8(name.array()));
            client.deleteTable(name);
        }
    }
    //
    // Create the demo table with two column families, entry: and unused:
    //
    ArrayList<ColumnDescriptor> columns = new ArrayList<>(2);
    ColumnDescriptor col;
    col = new ColumnDescriptor();
    col.name = ByteBuffer.wrap(bytes("entry:"));
    col.timeToLive = Integer.MAX_VALUE;
    col.maxVersions = 10;
    columns.add(col);
    col = new ColumnDescriptor();
    col.name = ByteBuffer.wrap(bytes("unused:"));
    col.timeToLive = Integer.MAX_VALUE;
    columns.add(col);
    System.out.println("creating table: " + utf8(t));
    try {
        client.createTable(ByteBuffer.wrap(t), columns);
    } catch (AlreadyExists ae) {
        System.out.println("WARN: " + ae.message);
    }
    System.out.println("column families in " + utf8(t) + ": ");
    Map<ByteBuffer, ColumnDescriptor> columnMap = client.getColumnDescriptors(ByteBuffer.wrap(t));
    for (ColumnDescriptor col2 : columnMap.values()) {
        System.out.println(" column: " + utf8(col2.name.array()) + ", maxVer: " + Integer.toString(col2.maxVersions));
    }
    Map<ByteBuffer, ByteBuffer> dummyAttributes = null;
    boolean writeToWal = false;
    //
    // Test UTF-8 handling
    //
    byte[] invalid = { (byte) 'f', (byte) 'o', (byte) 'o', (byte) '-', (byte) 0xfc, (byte) 0xa1, (byte) 0xa1, (byte) 0xa1, (byte) 0xa1 };
    byte[] valid = { (byte) 'f', (byte) 'o', (byte) 'o', (byte) '-', (byte) 0xE7, (byte) 0x94, (byte) 0x9F, (byte) 0xE3, (byte) 0x83, (byte) 0x93, (byte) 0xE3, (byte) 0x83, (byte) 0xBC, (byte) 0xE3, (byte) 0x83, (byte) 0xAB };
    ArrayList<Mutation> mutations;
    // non-utf8 is fine for data
    mutations = new ArrayList<>(1);
    mutations.add(new Mutation(false, ByteBuffer.wrap(bytes("entry:foo")), ByteBuffer.wrap(invalid), writeToWal));
    client.mutateRow(ByteBuffer.wrap(t), ByteBuffer.wrap(bytes("foo")), mutations, dummyAttributes);
    // this row name is valid utf8
    mutations = new ArrayList<>(1);
    mutations.add(new Mutation(false, ByteBuffer.wrap(bytes("entry:foo")), ByteBuffer.wrap(valid), writeToWal));
    client.mutateRow(ByteBuffer.wrap(t), ByteBuffer.wrap(valid), mutations, dummyAttributes);
    // non-utf8 is now allowed in row names because HBase stores values as binary
    mutations = new ArrayList<>(1);
    mutations.add(new Mutation(false, ByteBuffer.wrap(bytes("entry:foo")), ByteBuffer.wrap(invalid), writeToWal));
    client.mutateRow(ByteBuffer.wrap(t), ByteBuffer.wrap(invalid), mutations, dummyAttributes);
    // Run a scanner on the rows we just created
    ArrayList<ByteBuffer> columnNames = new ArrayList<>();
    columnNames.add(ByteBuffer.wrap(bytes("entry:")));
    System.out.println("Starting scanner...");
    int scanner = client.scannerOpen(ByteBuffer.wrap(t), ByteBuffer.wrap(bytes("")), columnNames, dummyAttributes);
    while (true) {
        List<TRowResult> entry = client.scannerGet(scanner);
        if (entry.isEmpty()) {
            break;
        }
        printRow(entry);
    }
    //
    for (int i = 100; i >= 0; --i) {
        // format row keys as "00000" to "00100"
        NumberFormat nf = NumberFormat.getInstance();
        nf.setMinimumIntegerDigits(5);
        nf.setGroupingUsed(false);
        byte[] row = bytes(nf.format(i));
        mutations = new ArrayList<>(1);
        mutations.add(new Mutation(false, ByteBuffer.wrap(bytes("unused:")), ByteBuffer.wrap(bytes("DELETE_ME")), writeToWal));
        client.mutateRow(ByteBuffer.wrap(t), ByteBuffer.wrap(row), mutations, dummyAttributes);
        printRow(client.getRow(ByteBuffer.wrap(t), ByteBuffer.wrap(row), dummyAttributes));
        client.deleteAllRow(ByteBuffer.wrap(t), ByteBuffer.wrap(row), dummyAttributes);
        // sleep to force later timestamp
        try {
            Thread.sleep(50);
        } catch (InterruptedException e) {
            // no-op
        }
        mutations = new ArrayList<>(2);
        mutations.add(new Mutation(false, ByteBuffer.wrap(bytes("entry:num")), ByteBuffer.wrap(bytes("0")), writeToWal));
        mutations.add(new Mutation(false, ByteBuffer.wrap(bytes("entry:foo")), ByteBuffer.wrap(bytes("FOO")), writeToWal));
        client.mutateRow(ByteBuffer.wrap(t), ByteBuffer.wrap(row), mutations, dummyAttributes);
        printRow(client.getRow(ByteBuffer.wrap(t), ByteBuffer.wrap(row), dummyAttributes));
        Mutation m;
        mutations = new ArrayList<>(2);
        m = new Mutation();
        m.column = ByteBuffer.wrap(bytes("entry:foo"));
        m.isDelete = true;
        mutations.add(m);
        m = new Mutation();
        m.column = ByteBuffer.wrap(bytes("entry:num"));
        m.value = ByteBuffer.wrap(bytes("-1"));
        mutations.add(m);
        client.mutateRow(ByteBuffer.wrap(t), ByteBuffer.wrap(row), mutations, dummyAttributes);
        printRow(client.getRow(ByteBuffer.wrap(t), ByteBuffer.wrap(row), dummyAttributes));
        mutations = new ArrayList<>();
        mutations.add(new Mutation(false, ByteBuffer.wrap(bytes("entry:num")), ByteBuffer.wrap(bytes(Integer.toString(i))), writeToWal));
        mutations.add(new Mutation(false, ByteBuffer.wrap(bytes("entry:sqr")), ByteBuffer.wrap(bytes(Integer.toString(i * i))), writeToWal));
        client.mutateRow(ByteBuffer.wrap(t), ByteBuffer.wrap(row), mutations, dummyAttributes);
        printRow(client.getRow(ByteBuffer.wrap(t), ByteBuffer.wrap(row), dummyAttributes));
        // sleep to force later timestamp
        try {
            Thread.sleep(50);
        } catch (InterruptedException e) {
            // no-op
        }
        mutations.clear();
        m = new Mutation();
        m.column = ByteBuffer.wrap(bytes("entry:num"));
        m.value = ByteBuffer.wrap(bytes("-999"));
        mutations.add(m);
        m = new Mutation();
        m.column = ByteBuffer.wrap(bytes("entry:sqr"));
        m.isDelete = true;
        mutations.add(m);
        // writes at timestamp 1 shouldn't override the latest versions
        client.mutateRowTs(ByteBuffer.wrap(t), ByteBuffer.wrap(row), mutations, 1, dummyAttributes);
        printRow(client.getRow(ByteBuffer.wrap(t), ByteBuffer.wrap(row), dummyAttributes));
        List<TCell> versions = client.getVer(ByteBuffer.wrap(t), ByteBuffer.wrap(row), ByteBuffer.wrap(bytes("entry:num")), 10, dummyAttributes);
        printVersions(ByteBuffer.wrap(row), versions);
        if (versions.isEmpty()) {
            System.out.println("FATAL: wrong # of versions");
            System.exit(-1);
        }
        List<TCell> result = client.get(ByteBuffer.wrap(t), ByteBuffer.wrap(row), ByteBuffer.wrap(bytes("entry:foo")), dummyAttributes);
        if (!result.isEmpty()) {
            System.out.println("FATAL: shouldn't get here");
            System.exit(-1);
        }
        System.out.println("");
    }
    // scan all rows/columnNames
    columnNames.clear();
    for (ColumnDescriptor col2 : client.getColumnDescriptors(ByteBuffer.wrap(t)).values()) {
        System.out.println("column with name: " + new String(col2.name.array()));
        System.out.println(col2.toString());
        columnNames.add(col2.name);
    }
    System.out.println("Starting scanner...");
    scanner = client.scannerOpenWithStop(ByteBuffer.wrap(t), ByteBuffer.wrap(bytes("00020")), ByteBuffer.wrap(bytes("00040")), columnNames, dummyAttributes);
    while (true) {
        List<TRowResult> entry = client.scannerGet(scanner);
        if (entry.isEmpty()) {
            System.out.println("Scanner finished");
            break;
        }
        printRow(entry);
    }
    transport.close();
}
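The snippet above relies on two small helpers, bytes() and utf8(), that are not shown; a reasonable sketch of them follows (the project's actual versions may differ, for example in how decoding errors are reported).

// Hedged helper sketch; requires java.nio.charset.StandardCharsets.
private byte[] bytes(String s) {
    return s.getBytes(StandardCharsets.UTF_8);
}

private String utf8(byte[] buf) {
    // Decode for display; invalid UTF-8 sequences are replaced rather than rejected.
    return new String(buf, StandardCharsets.UTF_8);
}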
Use of org.apache.thrift.transport.TTransport in project hbase by apache.
The class DemoClient (thrift2), method run.
public void run() throws Exception {
    int timeout = 10000;
    boolean framed = false;
    TTransport transport = new TSocket(host, port, timeout);
    if (framed) {
        transport = new TFramedTransport(transport);
    } else if (secure) {
        /**
         * The Thrift server the DemoClient is trying to connect to
         * must have a matching principal, and support authentication.
         *
         * The HBase cluster must be secure and allow proxy users.
         */
        Map<String, String> saslProperties = new HashMap<>();
        saslProperties.put(Sasl.QOP, "auth-conf,auth-int,auth");
        transport = new TSaslClientTransport("GSSAPI", null,
            user != null ? user : "hbase",    // Thrift server user name, should be an authorized proxy user
            host,                             // Thrift server domain
            saslProperties, null, transport);
    }
    TProtocol protocol = new TBinaryProtocol(transport);
    // This is our thrift client.
    THBaseService.Iface client = new THBaseService.Client(protocol);
    // open the transport
    transport.open();
    ByteBuffer table = ByteBuffer.wrap("example".getBytes());
    TPut put = new TPut();
    put.setRow("row1".getBytes());
    TColumnValue columnValue = new TColumnValue();
    columnValue.setFamily("family1".getBytes());
    columnValue.setQualifier("qualifier1".getBytes());
    columnValue.setValue("value1".getBytes());
    List<TColumnValue> columnValues = new ArrayList<>(1);
    columnValues.add(columnValue);
    put.setColumnValues(columnValues);
    client.put(table, put);
    TGet get = new TGet();
    get.setRow("row1".getBytes());
    TResult result = client.get(table, get);
    System.out.println("row = " + new String(result.getRow()));
    for (TColumnValue resultColumnValue : result.getColumnValues()) {
        System.out.println("family = " + new String(resultColumnValue.getFamily()));
        System.out.println("qualifier = " + new String(resultColumnValue.getQualifier()));
        System.out.println("value = " + new String(resultColumnValue.getValue()));
        System.out.println("timestamp = " + resultColumnValue.getTimestamp());
    }
    transport.close();
}
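A possible follow-up using the same thrift2 interface to remove the row written above. TDelete and deleteSingle are part of the generated THBaseService API, but treat the exact call as an assumption to verify against the generated client in use.

// Hedged cleanup sketch; verify method names against the generated THBaseService client.
TDelete delete = new TDelete();
delete.setRow("row1".getBytes());
client.deleteSingle(table, delete);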
Use of org.apache.thrift.transport.TTransport in project hbase by apache.
The class TBoundedThreadPoolServer, method serve.
public void serve() {
    try {
        serverTransport_.listen();
    } catch (TTransportException ttx) {
        LOG.error("Error occurred during listening.", ttx);
        return;
    }
    Runtime.getRuntime().addShutdownHook(new Thread(getClass().getSimpleName() + "-shutdown-hook") {
        @Override
        public void run() {
            TBoundedThreadPoolServer.this.stop();
        }
    });
    stopped = false;
    while (!stopped && !Thread.interrupted()) {
        TTransport client = null;
        try {
            client = serverTransport_.accept();
        } catch (TTransportException ttx) {
            if (!stopped) {
                LOG.warn("Transport error when accepting message", ttx);
                continue;
            } else {
                // The server has been stopped
                break;
            }
        }
        ClientConnnection command = new ClientConnnection(client);
        try {
            executorService.execute(command);
        } catch (RejectedExecutionException rex) {
            if (client.getClass() == TSocket.class) {
                // We expect the client to be a TSocket.
                LOG.warn(QUEUE_FULL_MSG + " from " + ((TSocket) client).getSocket().getRemoteSocketAddress());
            } else {
                LOG.warn(QUEUE_FULL_MSG, rex);
            }
            client.close();
        }
    }
    shutdownServer();
}
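For comparison, a minimal plain-libthrift thread-pool server setup. This is not HBase's TBoundedThreadPoolServer (whose constructor takes HBase-specific arguments); the port, pool sizes, and handler are assumptions.

// Hedged sketch of a stock TThreadPoolServer; `handler` (an Hbase.Iface implementation) is assumed.
TServerTransport serverTransport = new TServerSocket(9090); // throws TTransportException
Hbase.Processor<Hbase.Iface> processor = new Hbase.Processor<>(handler);
TThreadPoolServer.Args args = new TThreadPoolServer.Args(serverTransport)
        .processor(processor)
        .minWorkerThreads(4)
        .maxWorkerThreads(64);
TServer server = new TThreadPoolServer(args);
server.serve(); // blocks in an accept loop similar to serve() above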