Use of org.apache.thrift.protocol.TBinaryProtocol in project hive by apache: the class TestHCatInputFormat, method setUp.
/**
* Create an input sequence file with 100 records; every 10th record is bad.
* Load this table into Hive.
*/
@Before
@Override
public void setUp() throws Exception {
  super.setUp();
  if (setUpComplete) {
    return;
  }
  Path intStringSeq = new Path(TEST_DATA_DIR + "/data/intString.seq");
  LOG.info("Creating data file: " + intStringSeq);
  SequenceFile.Writer seqFileWriter = SequenceFile.createWriter(intStringSeq.getFileSystem(hiveConf), hiveConf,
      intStringSeq, NullWritable.class, BytesWritable.class);
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  TIOStreamTransport transport = new TIOStreamTransport(out);
  TBinaryProtocol protocol = new TBinaryProtocol(transport);
  for (int i = 1; i <= 100; i++) {
    if (i % 10 == 0) {
      seqFileWriter.append(NullWritable.get(), new BytesWritable("bad record".getBytes()));
    } else {
      out.reset();
      IntString intString = new IntString(i, Integer.toString(i), i);
      intString.write(protocol);
      BytesWritable bytesWritable = new BytesWritable(out.toByteArray());
      seqFileWriter.append(NullWritable.get(), bytesWritable);
    }
  }
  seqFileWriter.close();
  // Now let's load this file into a new Hive table.
  Assert.assertEquals(0, driver.run("drop table if exists test_bad_records").getResponseCode());
  Assert.assertEquals(0, driver.run("create table test_bad_records " +
      "row format serde 'org.apache.hadoop.hive.serde2.thrift.ThriftDeserializer' " +
      "with serdeproperties ( " +
      " 'serialization.class'='org.apache.hadoop.hive.serde2.thrift.test.IntString', " +
      " 'serialization.format'='org.apache.thrift.protocol.TBinaryProtocol') " +
      "stored as" +
      " inputformat 'org.apache.hadoop.mapred.SequenceFileInputFormat'" +
      " outputformat 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'").getResponseCode());
  Assert.assertEquals(0, driver.run("load data local inpath '" + intStringSeq.getParent() +
      "' into table test_bad_records").getResponseCode());
  setUpComplete = true;
}
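The read path mirrors this write path: the BytesWritable payload is wrapped in an in-memory transport and handed to the Thrift-generated struct. A minimal sketch (not part of the Hive test; the helper name and its argument are assumptions):

private static IntString readIntString(BytesWritable value) throws TException {
  // Wrap the raw record bytes in an in-memory transport, mirroring the writer above.
  ByteArrayInputStream in = new ByteArrayInputStream(value.copyBytes());
  TBinaryProtocol protocol = new TBinaryProtocol(new TIOStreamTransport(in));
  IntString record = new IntString();
  // Thrift-generated classes populate themselves from the protocol; the
  // intentionally corrupted every-10th records fail here with a TException.
  record.read(protocol);
  return record;
}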
Use of org.apache.thrift.protocol.TBinaryProtocol in project hive by apache: the class HiveMetaStoreClient, method open.
private void open() throws MetaException {
  isConnected = false;
  TTransportException tte = null;
  boolean useSSL = MetastoreConf.getBoolVar(conf, ConfVars.USE_SSL);
  boolean useSasl = MetastoreConf.getBoolVar(conf, ConfVars.USE_THRIFT_SASL);
  boolean useFramedTransport = MetastoreConf.getBoolVar(conf, ConfVars.USE_THRIFT_FRAMED_TRANSPORT);
  boolean useCompactProtocol = MetastoreConf.getBoolVar(conf, ConfVars.USE_THRIFT_COMPACT_PROTOCOL);
  int clientSocketTimeout = (int) MetastoreConf.getTimeVar(conf, ConfVars.CLIENT_SOCKET_TIMEOUT, TimeUnit.MILLISECONDS);
  for (int attempt = 0; !isConnected && attempt < retries; ++attempt) {
    for (URI store : metastoreUris) {
      LOG.info("Trying to connect to metastore with URI " + store);
      try {
        if (useSSL) {
          try {
            String trustStorePath = MetastoreConf.getVar(conf, ConfVars.SSL_TRUSTSTORE_PATH).trim();
            if (trustStorePath.isEmpty()) {
              throw new IllegalArgumentException(ConfVars.SSL_TRUSTSTORE_PATH.toString() + " Not configured for SSL connection");
            }
            String trustStorePassword = MetastoreConf.getPassword(conf, MetastoreConf.ConfVars.SSL_TRUSTSTORE_PASSWORD);
            // Create an SSL socket and connect
            transport = SecurityUtils.getSSLSocket(store.getHost(), store.getPort(), clientSocketTimeout, trustStorePath, trustStorePassword);
            LOG.info("Opened an SSL connection to metastore, current connections: " + connCount.incrementAndGet());
          } catch (IOException e) {
            throw new IllegalArgumentException(e);
          } catch (TTransportException e) {
            tte = e;
            throw new MetaException(e.toString());
          }
        } else {
          transport = new TSocket(store.getHost(), store.getPort(), clientSocketTimeout);
        }
        if (useSasl) {
          // Wrap thrift connection with SASL for secure connection.
          try {
            HadoopThriftAuthBridge.Client authBridge = HadoopThriftAuthBridge.getBridge().createClient();
            // Check if we should use delegation tokens to authenticate the call.
            // The call below gets hold of the tokens if they are set up by hadoop;
            // this should happen on the map/reduce tasks if the client added the
            // tokens into hadoop's credential store in the front end during job
            // submission.
            String tokenSig = MetastoreConf.getVar(conf, ConfVars.TOKEN_SIGNATURE);
            // tokenSig could be null
            tokenStrForm = SecurityUtils.getTokenStrForm(tokenSig);
            if (tokenStrForm != null) {
              LOG.info("HMSC::open(): Found delegation token. Creating DIGEST-based thrift connection.");
              // Authenticate using delegation tokens via the "DIGEST" mechanism.
              transport = authBridge.createClientTransport(null, store.getHost(), "DIGEST", tokenStrForm, transport, MetaStoreUtils.getMetaStoreSaslProperties(conf, useSSL));
            } else {
              LOG.info("HMSC::open(): Could not find delegation token. Creating KERBEROS-based thrift connection.");
              String principalConfig = MetastoreConf.getVar(conf, ConfVars.KERBEROS_PRINCIPAL);
              transport = authBridge.createClientTransport(principalConfig, store.getHost(), "KERBEROS", null, transport, MetaStoreUtils.getMetaStoreSaslProperties(conf, useSSL));
            }
          } catch (IOException ioe) {
            LOG.error("Couldn't create client transport", ioe);
            throw new MetaException(ioe.toString());
          }
        } else {
          if (useFramedTransport) {
            transport = new TFramedTransport(transport);
          }
        }
        final TProtocol protocol;
        if (useCompactProtocol) {
          protocol = new TCompactProtocol(transport);
        } else {
          protocol = new TBinaryProtocol(transport);
        }
        client = new ThriftHiveMetastore.Client(protocol);
        try {
          if (!transport.isOpen()) {
            transport.open();
            LOG.info("Opened a connection to metastore, current connections: " + connCount.incrementAndGet());
          }
          isConnected = true;
        } catch (TTransportException e) {
          tte = e;
          if (LOG.isDebugEnabled()) {
            LOG.warn("Failed to connect to the MetaStore Server...", e);
          } else {
            // Don't print full exception trace if DEBUG is not on.
            LOG.warn("Failed to connect to the MetaStore Server...");
          }
        }
        if (isConnected && !useSasl && MetastoreConf.getBoolVar(conf, ConfVars.EXECUTE_SET_UGI)) {
          // Call set_ugi, only in unsecure mode.
          try {
            UserGroupInformation ugi = SecurityUtils.getUGI();
            client.set_ugi(ugi.getUserName(), Arrays.asList(ugi.getGroupNames()));
          } catch (LoginException e) {
            LOG.warn("Failed to do login. set_ugi() is not successful, " + "Continuing without it.", e);
          } catch (IOException e) {
            LOG.warn("Failed to find ugi of client set_ugi() is not successful, " + "Continuing without it.", e);
          } catch (TException e) {
            LOG.warn("set_ugi() not successful, Likely cause: new client talking to old server. " + "Continuing without it.", e);
          }
        }
      } catch (MetaException e) {
        LOG.error("Unable to connect to metastore with URI " + store + " in attempt " + attempt, e);
      }
      if (isConnected) {
        break;
      }
    }
    // Wait before launching the next round of connection retries.
    if (!isConnected && retryDelaySeconds > 0) {
      try {
        LOG.info("Waiting " + retryDelaySeconds + " seconds before next connection attempt.");
        Thread.sleep(retryDelaySeconds * 1000);
      } catch (InterruptedException ignore) {
      }
    }
  }
  if (!isConnected) {
    throw new MetaException("Could not connect to meta store using any of the URIs provided." + " Most recent failure: " + StringUtils.stringifyException(tte));
  }
  snapshotActiveConf();
  LOG.info("Connected to metastore.");
}
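Stripped of SSL, SASL, framing, and retry handling, the core of open() is a standard Thrift client setup. A minimal sketch of that simplest path (the host, port, and timeout are hypothetical; the real values come from MetastoreConf and the configured metastore URIs):

static void connectPlain() throws TException {
  // Corresponds to the no-SSL, no-SASL, unframed branch above; hypothetical endpoint.
  TTransport transport = new TSocket("metastore.example.com", 9083, 60000);
  TProtocol protocol = new TBinaryProtocol(transport);
  ThriftHiveMetastore.Client client = new ThriftHiveMetastore.Client(protocol);
  transport.open();
  try {
    // Any generated metastore call can now go over the wire.
    client.get_all_databases();
  } finally {
    transport.close();
  }
}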
Use of org.apache.thrift.protocol.TBinaryProtocol in project vcell by virtualcell: the class BioformatsImageDatasetReader, method readImageDatasetFromMultiFiles.
@Override
public ImageDataset readImageDatasetFromMultiFiles(File[] files, ClientTaskStatusSupport status, boolean isTimeSeries, double timeInterval) throws Exception {
  try (TTransport transport = new TSocket("localhost", port)) {
    transport.open();
    TProtocol protocol = new TBinaryProtocol(transport);
    ImageDatasetService.Client client = new ImageDatasetService.Client(protocol);
    ArrayList<String> fileList = new ArrayList<String>();
    for (File f : files) {
      fileList.add(f.getAbsolutePath());
    }
    org.vcell.imagedataset.ImageDataset t_imageDataset = client.readImageDatasetFromMultiFiles(fileList, isTimeSeries, timeInterval);
    ImageDataset imageDataset = getImageDataset(t_imageDataset);
    return imageDataset;
  }
}
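A hypothetical call site for the method above (the file paths, time interval, and the way the reader instance is obtained are assumptions for illustration):

// Hypothetical usage: load two frames of a time series sampled 0.5 s apart.
// Passing null for the status callback, which this implementation never reads.
File[] frames = { new File("/data/frame_t0.tif"), new File("/data/frame_t1.tif") };
ImageDataset dataset = reader.readImageDatasetFromMultiFiles(frames, null, true, 0.5);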
Use of org.apache.thrift.protocol.TBinaryProtocol in project vcell by virtualcell: the class BioformatsImageDatasetReader, method getImageSizeInfoForceZ.
@Override
public ImageSizeInfo getImageSizeInfoForceZ(String fileName, int forceZSize) throws Exception {
  try (TTransport transport = new TSocket("localhost", port)) {
    transport.open();
    TProtocol protocol = new TBinaryProtocol(transport);
    ImageDatasetService.Client client = new ImageDatasetService.Client(protocol);
    org.vcell.imagedataset.ImageSizeInfo t_imageSizeInfo = client.getImageSizeInfoForceZ(fileName, forceZSize);
    org.vcell.imagedataset.ISize t_size = t_imageSizeInfo.getISize();
    double[] timePoints = new double[t_imageSizeInfo.getTimePointsSize()];
    for (int i = 0; i < t_imageSizeInfo.getTimePointsSize(); i++) {
      timePoints[i] = t_imageSizeInfo.getTimePoints().get(i);
    }
    ImageSizeInfo imageSizeInfo = new ImageSizeInfo(t_imageSizeInfo.imagePath, new ISize(t_size.getX(), t_size.getY(), t_size.getZ()), t_imageSizeInfo.numChannels, timePoints, t_imageSizeInfo.selectedTimeIndex);
    return imageSizeInfo;
  }
}
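Both client methods repeat the same connect-call-close boilerplate against the local Thrift service. A sketch of factoring that pattern out (not in the VCell sources; the functional interface and helper names are hypothetical):

// Hypothetical helper that owns the transport lifecycle; each remote call
// gets a fresh client over a fresh socket, mirroring the two methods above.
interface ThriftCall<T> {
  T call(ImageDatasetService.Client client) throws Exception;
}

private <T> T withClient(ThriftCall<T> body) throws Exception {
  // try-with-resources closes the socket whether the call succeeds or fails.
  try (TTransport transport = new TSocket("localhost", port)) {
    transport.open();
    return body.call(new ImageDatasetService.Client(new TBinaryProtocol(transport)));
  }
}

getImageSizeInfoForceZ, for example, would then begin with withClient(client -> client.getImageSizeInfoForceZ(fileName, forceZSize)) and keep only the result-conversion logic.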