Usage of org.apache.storm.hive.common.HiveWriter in the Apache Storm project:
class HiveBolt, method retireIdleWriters.
/**
 * Scan all writers and retire every one whose idle time exceeds the
 * configured idle timeout.
 *
 * @return the number of writers that were retired
 */
private int retireIdleWriters() {
    LOG.info("Attempting close idle writers");
    int retired = 0;
    final long now = System.currentTimeMillis();
    // Retire each writer that has been unused for longer than the idle timeout.
    for (Entry<HiveEndPoint, HiveWriter> writerEntry : allWriters.entrySet()) {
        long idleFor = now - writerEntry.getValue().getLastUsed();
        if (idleFor > options.getIdleTimeout()) {
            retired++;
            retire(writerEntry.getKey());
        }
    }
    return retired;
}
Usage of org.apache.storm.hive.common.HiveWriter in the Apache Storm project:
class HiveBolt, method prepare.
/**
 * Prepares the bolt: performs Hive Kerberos/token authentication, then
 * initializes the writer map, the single-threaded call-timeout executor,
 * and the per-task heartbeat timer.
 *
 * @param conf topology configuration
 * @param topologyContext context for this task (used to name the heartbeat timer)
 * @param collector output collector used for acking/failing tuples
 * @throws IllegalArgumentException if Hive Kerberos authentication fails
 */
@Override
public void prepare(Map<String, Object> conf, TopologyContext topologyContext, OutputCollector collector) {
    try {
        tokenAuthEnabled = HiveUtils.isTokenAuthEnabled(conf);
        try {
            ugi = HiveUtils.authenticate(tokenAuthEnabled, options.getKerberosKeytab(), options.getKerberosPrincipal());
        } catch (HiveUtils.AuthenticationFailed ex) {
            LOG.error("Hive kerberos authentication failed " + ex.getMessage(), ex);
            throw new IllegalArgumentException(ex);
        }
        this.collector = collector;
        this.batchHelper = new BatchHelper(options.getBatchSize(), collector);
        allWriters = new ConcurrentHashMap<HiveEndPoint, HiveWriter>();
        String timeoutName = "hive-bolt-%d";
        this.callTimeoutPool = Executors.newFixedThreadPool(1, new ThreadFactoryBuilder().setNameFormat(timeoutName).build());
        sendHeartBeat.set(true);
        heartBeatTimer = new Timer(topologyContext.getThisTaskId() + "-hb-timer", true);
        setupHeartBeatTimer();
    } catch (IllegalArgumentException e) {
        // The auth failure above is deliberately fatal; without this rethrow the
        // generic catch below would swallow it and the bolt would start without
        // valid credentials.
        throw e;
    } catch (Exception e) {
        // Best-effort for transient connection problems: log and continue;
        // writers are created lazily on first write.
        LOG.warn("unable to make connection to hive ", e);
    }
}
Usage of org.apache.storm.hive.common.HiveWriter in the Apache Storm project:
class HiveState, method retireEldestWriter.
/**
 * Locate the writer that has not been used for the longest time and retire
 * (flush and close) it. No-op when there are no writers.
 */
private void retireEldestWriter() {
    long oldestTimeStamp = System.currentTimeMillis();
    HiveEndPoint eldest = null;
    for (Entry<HiveEndPoint, HiveWriter> entry : allWriters.entrySet()) {
        if (entry.getValue().getLastUsed() < oldestTimeStamp) {
            eldest = entry.getKey();
            oldestTimeStamp = entry.getValue().getLastUsed();
        }
    }
    if (eldest == null) {
        // Empty map: nothing to retire. Without this guard,
        // ConcurrentHashMap.remove(null) throws NullPointerException.
        return;
    }
    try {
        LOG.info("Closing least used Writer to Hive end point : " + eldest);
        allWriters.remove(eldest).flushAndClose();
    } catch (IOException e) {
        LOG.warn("Failed to close writer for end point: " + eldest, e);
    } catch (InterruptedException e) {
        LOG.warn("Interrupted when attempting to close writer for end point: " + eldest, e);
        Thread.currentThread().interrupt();
    } catch (Exception e) {
        // Fixed copy-paste message: this branch handles non-interrupt failures.
        LOG.warn("Failed to close writer for end point: " + eldest, e);
    }
}
Usage of org.apache.storm.hive.common.HiveWriter in the Apache Storm project:
class HiveState, method writeTuples.
/**
 * Writes each tuple to the Hive writer for its partition, flushing all
 * writers whenever the accumulated batch reaches the configured batch size.
 *
 * @param tuples the tuples to write
 * @throws Exception if mapping, writing, or flushing fails
 */
private void writeTuples(List<TridentTuple> tuples) throws Exception {
    for (TridentTuple trident : tuples) {
        List<String> partitions = options.getMapper().mapPartitions(trident);
        HiveWriter writer = getOrCreateWriter(HiveUtils.makeEndPoint(partitions, options));
        writer.write(options.getMapper().mapRecord(trident));
        if (++currentBatchSize >= options.getBatchSize()) {
            flushAllWriters();
            currentBatchSize = 0;
        }
    }
}
Usage of org.apache.storm.hive.common.HiveWriter in the Apache Storm project:
class HiveState, method prepare.
/**
 * Prepares the state: performs Hive Kerberos/token authentication, then
 * initializes the writer map, the single-threaded call-timeout executor,
 * and the heartbeat timer.
 *
 * @param conf topology configuration
 * @param metrics metrics context (unused here)
 * @param partitionIndex index of this partition (unused here)
 * @param numPartitions total number of partitions (unused here)
 * @throws IllegalArgumentException if Hive Kerberos authentication fails
 */
public void prepare(Map<String, Object> conf, IMetricsContext metrics, int partitionIndex, int numPartitions) {
    try {
        tokenAuthEnabled = HiveUtils.isTokenAuthEnabled(conf);
        try {
            ugi = HiveUtils.authenticate(tokenAuthEnabled, options.getKerberosKeytab(), options.getKerberosPrincipal());
        } catch (HiveUtils.AuthenticationFailed ex) {
            LOG.error("Hive kerberos authentication failed " + ex.getMessage(), ex);
            throw new IllegalArgumentException(ex);
        }
        allWriters = new ConcurrentHashMap<HiveEndPoint, HiveWriter>();
        // NOTE(review): thread-name pattern says "hive-bolt" inside HiveState —
        // looks copy-pasted from HiveBolt; confirm before renaming (cosmetic only).
        String timeoutName = "hive-bolt-%d";
        this.callTimeoutPool = Executors.newFixedThreadPool(1, new ThreadFactoryBuilder().setNameFormat(timeoutName).build());
        heartBeatTimer = new Timer("hive-hb-timer", true);
        setupHeartBeatTimer();
    } catch (IllegalArgumentException e) {
        // The auth failure above is deliberately fatal; without this rethrow the
        // generic catch below would swallow it and the state would be used
        // without valid credentials.
        throw e;
    } catch (Exception e) {
        // Best-effort for transient connection problems: log and continue;
        // writers are created lazily on first write.
        LOG.warn("unable to make connection to hive ", e);
    }
}
Aggregations