Use of org.apache.storm.hive.common.HiveWriter in project storm by apache: class HiveState, method retireIdleWriters.
/**
 * Locate all writers past idle timeout and retire them.
 * @return number of writers retired
 */
private int retireIdleWriters() {
    int count = 0;
    long now = System.currentTimeMillis();
    ArrayList<HiveEndPoint> retirees = new ArrayList<HiveEndPoint>();
    // 1) Find retirement candidates. Collected into a separate list so the
    //    map is not mutated while its entry set is being iterated.
    for (Entry<HiveEndPoint, HiveWriter> entry : allWriters.entrySet()) {
        if (now - entry.getValue().getLastUsed() > options.getIdleTimeout()) {
            ++count;
            retirees.add(entry.getKey());
        }
    }
    // 2) Retire them: remove from the cache, then flush and close.
    for (HiveEndPoint ep : retirees) {
        try {
            LOG.info("Closing idle Writer to Hive end point : {}", ep);
            // remove() may return null if the entry vanished concurrently;
            // guard to avoid an NPE on flushAndClose().
            HiveWriter writer = allWriters.remove(ep);
            if (writer != null) {
                writer.flushAndClose();
            }
        } catch (IOException e) {
            // Fixed: original mixed an SLF4J "{}" placeholder with string
            // concatenation, so the endpoint was never substituted.
            LOG.warn("Failed to close writer for end point: " + ep, e);
        } catch (InterruptedException e) {
            LOG.warn("Interrupted when attempting to close writer for end point: " + ep, e);
            Thread.currentThread().interrupt();
        } catch (Exception e) {
            // Fixed: original reused the copy-pasted "Interrupted..." message
            // for the non-interrupt case.
            LOG.warn("Failed to close writer for end point: " + ep, e);
        }
    }
    return count;
}
Use of org.apache.storm.hive.common.HiveWriter in project storm by apache: class HiveBolt, method prepare.
@Override
public void prepare(Map conf, TopologyContext topologyContext, OutputCollector collector) {
    // Kerberos validation and authentication run OUTSIDE the broad catch
    // below. In the original, the IllegalArgumentException thrown for an
    // inconsistent principal/keytab pair was immediately swallowed by
    // catch (Exception e), so misconfiguration was silently logged and the
    // bolt continued half-initialized. Configuration errors now fail fast.
    if (options.getKerberosPrincipal() == null && options.getKerberosKeytab() == null) {
        kerberosEnabled = false;
    } else if (options.getKerberosPrincipal() != null && options.getKerberosKeytab() != null) {
        kerberosEnabled = true;
    } else {
        throw new IllegalArgumentException("To enable Kerberos, need to set both KerberosPrincipal " + " & KerberosKeytab");
    }
    if (kerberosEnabled) {
        try {
            ugi = HiveUtils.authenticate(options.getKerberosKeytab(), options.getKerberosPrincipal());
        } catch (HiveUtils.AuthenticationFailed ex) {
            LOG.error("Hive Kerberos authentication failed " + ex.getMessage(), ex);
            throw new IllegalArgumentException(ex);
        }
    }
    // Connection/resource setup keeps the original best-effort semantics:
    // failures are logged and the bolt retries lazily on first use.
    try {
        this.collector = collector;
        this.batchHelper = new BatchHelper(options.getBatchSize(), collector);
        allWriters = new ConcurrentHashMap<HiveEndPoint, HiveWriter>();
        String timeoutName = "hive-bolt-%d";
        this.callTimeoutPool = Executors.newFixedThreadPool(1, new ThreadFactoryBuilder().setNameFormat(timeoutName).build());
        sendHeartBeat.set(true);
        heartBeatTimer = new Timer();
        setupHeartBeatTimer();
    } catch (Exception e) {
        LOG.warn("unable to make connection to hive ", e);
    }
}
Use of org.apache.storm.hive.common.HiveWriter in project storm by apache: class HiveBolt, method retireEldestWriter.
/**
 * Locate writer that has not been used for longest time and retire it.
 * No-op when there are no cached writers.
 */
private void retireEldestWriter() {
    LOG.info("Attempting close eldest writers");
    long oldestTimeStamp = System.currentTimeMillis();
    HiveEndPoint eldest = null;
    for (Entry<HiveEndPoint, HiveWriter> entry : allWriters.entrySet()) {
        if (entry.getValue().getLastUsed() < oldestTimeStamp) {
            eldest = entry.getKey();
            oldestTimeStamp = entry.getValue().getLastUsed();
        }
    }
    // Fixed: with an empty map, eldest stays null and
    // ConcurrentHashMap.remove(null) throws NullPointerException.
    if (eldest == null) {
        return;
    }
    try {
        LOG.info("Closing least used Writer to Hive end point : {}", eldest);
        // remove() may return null if the entry vanished concurrently;
        // guard to avoid an NPE on flushAndClose().
        HiveWriter writer = allWriters.remove(eldest);
        if (writer != null) {
            writer.flushAndClose();
        }
    } catch (IOException e) {
        LOG.warn("Failed to close writer for end point: " + eldest, e);
    } catch (InterruptedException e) {
        LOG.warn("Interrupted when attempting to close writer for end point: " + eldest, e);
        Thread.currentThread().interrupt();
    } catch (Exception e) {
        // Fixed: original reused the copy-pasted "Interrupted..." message
        // for the non-interrupt case.
        LOG.warn("Failed to close writer for end point: " + eldest, e);
    }
}
Use of org.apache.storm.hive.common.HiveWriter in project storm by apache: class HiveBolt, method cleanup.
/**
 * Shut the bolt down: stop heartbeats, flush and close every cached writer,
 * drain the call-timeout executor, and cancel the heartbeat timer.
 * Best-effort: individual writer failures are logged, not propagated.
 */
@Override
public void cleanup() {
    sendHeartBeat.set(false);
    for (Entry<HiveEndPoint, HiveWriter> entry : allWriters.entrySet()) {
        try {
            HiveWriter w = entry.getValue();
            w.flushAndClose();
        } catch (Exception ex) {
            LOG.warn("Error while closing writer to " + entry.getKey() + ". Exception follows.", ex);
            if (ex instanceof InterruptedException) {
                Thread.currentThread().interrupt();
            }
        }
    }
    // Fixed: prepare() swallows setup failures, so callTimeoutPool can still
    // be null here; the original dereferenced it unconditionally (NPE).
    if (callTimeoutPool != null) {
        ExecutorService[] toShutdown = { callTimeoutPool };
        for (ExecutorService execService : toShutdown) {
            execService.shutdown();
            try {
                // Loop because awaitTermination returns on timeout without
                // guaranteeing termination.
                while (!execService.isTerminated()) {
                    execService.awaitTermination(options.getCallTimeOut(), TimeUnit.MILLISECONDS);
                }
            } catch (InterruptedException ex) {
                LOG.warn("shutdown interrupted on " + execService, ex);
                // Fixed: restore interrupt status instead of dropping it.
                Thread.currentThread().interrupt();
            }
        }
        callTimeoutPool = null;
    }
    if (heartBeatTimer != null) {
        heartBeatTimer.cancel();
    }
    super.cleanup();
    LOG.info("Hive Bolt stopped");
}
Use of org.apache.storm.hive.common.HiveWriter in project storm by apache: class HiveBolt, method getOrCreateWriter.
/**
 * Return the cached {@link HiveWriter} for {@code endPoint}, creating and
 * caching a new one if absent. When the cache would exceed
 * maxOpenConnections, idle writers are retired first, falling back to
 * retiring the least-recently-used writer.
 *
 * @param endPoint the Hive streaming endpoint to write to
 * @return a connected writer for the endpoint
 * @throws HiveWriter.ConnectFailure if a new writer cannot connect
 * @throws InterruptedException if interrupted while connecting
 */
@VisibleForTesting
HiveWriter getOrCreateWriter(HiveEndPoint endPoint) throws HiveWriter.ConnectFailure, InterruptedException {
    try {
        HiveWriter writer = allWriters.get(endPoint);
        if (writer == null) {
            // Fixed: use parameterized logging (consistent with the rest of
            // the class) instead of concatenation, which built the message
            // even when debug logging is disabled.
            LOG.debug("Creating Writer to Hive end point : {}", endPoint);
            writer = HiveUtils.makeHiveWriter(endPoint, callTimeoutPool, ugi, options, tokenAuthEnabled);
            // Evict before inserting so the cache stays within
            // maxOpenConnections after the put below.
            if (allWriters.size() > (options.getMaxOpenConnections() - 1)) {
                LOG.info("cached HiveEndPoint size {} exceeded maxOpenConnections {} ", allWriters.size(), options.getMaxOpenConnections());
                int retired = retireIdleWriters();
                if (retired == 0) {
                    retireEldestWriter();
                }
            }
            allWriters.put(endPoint, writer);
            HiveUtils.logAllHiveEndPoints(allWriters);
        }
        return writer;
    } catch (HiveWriter.ConnectFailure e) {
        LOG.error("Failed to create HiveWriter for endpoint: " + endPoint, e);
        throw e;
    }
}
Aggregations