use of org.apache.thrift.transport.TTransport in project jena by apache.
the class TRDF method protocol.
/**
 * Create Thrift protocol for the OutputStream.
 * The caller must call {@link TRDF#flush(TProtocol)}
 * which will flush the underlying (internally buffered) output stream.
 * @param out OutputStream
 */
public static TProtocol protocol(OutputStream out) {
    try {
        // Flushing the protocol will flush the BufferedOutputStream
        if (!(out instanceof BufferedOutputStream))
            out = new BufferedOutputStream(out, OutputBufferSize);
        TTransport transport = new TIOStreamTransport(out);
        transport.open();
        TProtocol protocol = protocol(transport);
        return protocol;
    } catch (TException ex) {
        TRDF.exception(ex);
        return null;
    }
}
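A minimal caller sketch of how this helper is intended to be used: obtain the protocol, write through it, then call TRDF.flush(TProtocol) so the internal BufferedOutputStream is flushed. The output file name and the writing step are hypothetical placeholders, not part of the Jena source above.

try (OutputStream out = new FileOutputStream("data.trdf")) {   // file name is hypothetical
    TProtocol protocol = TRDF.protocol(out);
    // ... write Thrift-encoded RDF stream rows to 'protocol' here ...
    TRDF.flush(protocol);   // flushes the internal BufferedOutputStream, per the javadoc above
}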
use of org.apache.thrift.transport.TTransport in project cdap by caskdata.
the class ThriftHelper method getThriftProtocol.
/**
 * Generic method to discover a Thrift service and start up the
 * Thrift transport and protocol layer.
 */
public static TProtocol getThriftProtocol(String serviceName, EndpointStrategy endpointStrategy) throws ServerException {
    Discoverable endpoint = endpointStrategy.pick();
    if (endpoint == null) {
        String message = String.format("Service '%s' is not registered in discovery service.", serviceName);
        LOG.error(message);
        throw new ServerException(message);
    }
    TTransport transport = new TFramedTransport(new TSocket(endpoint.getSocketAddress().getHostName(), endpoint.getSocketAddress().getPort()));
    try {
        transport.open();
    } catch (TTransportException e) {
        String message = String.format("Unable to connect to thrift service %s at %s. Reason: %s", serviceName, endpoint.getSocketAddress(), e.getMessage());
        LOG.error(message);
        throw new ServerException(message, e);
    }
    // Wrap the open transport in a binary protocol for the Thrift client
    return new TBinaryProtocol(transport);
}
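A hedged usage sketch of the returned protocol: discover the endpoint, build a Thrift-generated client on top of the framed binary protocol, and close the transport when done. The service name and the MyService client class are hypothetical placeholders, not part of the CDAP source above.

TProtocol protocol = ThriftHelper.getThriftProtocol("stream.service", endpointStrategy);   // service name is a placeholder
MyService.Client client = new MyService.Client(protocol);   // MyService is a hypothetical Thrift-generated service
// ... issue RPC calls on 'client' ...
protocol.getTransport().close();   // close the underlying framed transport when finished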
use of org.apache.thrift.transport.TTransport in project hive by apache.
the class TestThriftHttpCLIServiceFeatures method testCustomCookies.
/**
 * Test custom cookies passed to the request interceptor.
 * @throws Exception
 */
@Test
public void testCustomCookies() throws Exception {
    TTransport transport;
    DefaultHttpClient hClient = new DefaultHttpClient();
    String httpUrl = getHttpUrl();
    Map<String, String> additionalHeaders = new HashMap<String, String>();
    Map<String, String> cookieHeaders = new HashMap<String, String>();
    cookieHeaders.put("key1", "value1");
    cookieHeaders.put("key2", "value2");
    HttpBasicAuthInterceptorWithLogging authInt = new HttpBasicAuthInterceptorWithLogging(ThriftCLIServiceTest.USERNAME, ThriftCLIServiceTest.PASSWORD, null, null, false, additionalHeaders, cookieHeaders);
    hClient.addRequestInterceptor(authInt);
    transport = new THttpClient(httpUrl, hClient);
    TCLIService.Client httpClient = getClient(transport);
    // Create a new open session request object
    TOpenSessionReq openReq = new TOpenSessionReq();
    httpClient.OpenSession(openReq).getSessionHandle();
    String cookieHeader = authInt.getCookieHeader();
    assertTrue(cookieHeader.contains("key1=value1"));
    assertTrue(cookieHeader.contains("key2=value2"));
}
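For illustration only (this is not Hive's actual HttpBasicAuthInterceptorWithLogging implementation): an HttpClient request interceptor that folds the cookieHeaders map from the test above into a single Cookie header is roughly the behaviour the getCookieHeader() assertions exercise.

HttpRequestInterceptor cookieInterceptor = (request, context) -> {
    StringBuilder cookie = new StringBuilder();
    for (Map.Entry<String, String> e : cookieHeaders.entrySet()) {
        if (cookie.length() > 0)
            cookie.append("; ");
        cookie.append(e.getKey()).append('=').append(e.getValue());
    }
    // Attach the custom cookies as one Cookie header on every outgoing request
    request.addHeader("Cookie", cookie.toString());
};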
use of org.apache.thrift.transport.TTransport in project hive by apache.
the class TestThriftHttpCLIServiceFeatures method testBinaryClientHttpServer.
/**
 * Tests calls from a raw (NOSASL) binary client
 * to a HiveServer2 running in http mode.
 * This should throw an expected exception due to incompatibility.
 * @throws Exception
 */
@Test
public void testBinaryClientHttpServer() throws Exception {
    TTransport transport = getRawBinaryTransport();
    TCLIService.Client rawBinaryClient = getClient(transport);
    // This will throw an expected exception since client-server modes are incompatible
    testOpenSessionExpectedException(rawBinaryClient);
}
use of org.apache.thrift.transport.TTransport in project hive by apache.
the class TestThriftHttpCLIServiceFeatures method testAdditionalHttpHeaders.
/**
 * Test additional http headers passed to request interceptor.
 * @throws Exception
 */
@Test
public void testAdditionalHttpHeaders() throws Exception {
    TTransport transport;
    DefaultHttpClient hClient = new DefaultHttpClient();
    String httpUrl = getHttpUrl();
    Map<String, String> additionalHeaders = new HashMap<String, String>();
    additionalHeaders.put("key1", "value1");
    additionalHeaders.put("key2", "value2");
    HttpBasicAuthInterceptorWithLogging authInt = new HttpBasicAuthInterceptorWithLogging(ThriftCLIServiceTest.USERNAME, ThriftCLIServiceTest.PASSWORD, null, null, false, additionalHeaders, null);
    hClient.addRequestInterceptor(authInt);
    transport = new THttpClient(httpUrl, hClient);
    TCLIService.Client httpClient = getClient(transport);
    // Create a new open session request object
    TOpenSessionReq openReq = new TOpenSessionReq();
    httpClient.OpenSession(openReq).getSessionHandle();
    ArrayList<String> headers = authInt.getRequestHeaders();
    for (String h : headers) {
        assertTrue(h.contains("key1:value1"));
        assertTrue(h.contains("key2:value2"));
    }
}