Use of org.apache.hadoop.chukwa.ChukwaArchiveKey in project Honu by jboulon.
In class LockFreeWriter, method add:
/**
 * Best effort: there is no guarantee that chunks
 * have really been written to disk.
 */
public CommitStatus add(List<Chunk> chunks) throws WriterException {
    Tracer t = Tracer.startNewTracer("honu.server." + group + ".addToList");
    long now = System.currentTimeMillis();
    if (chunks != null) {
        try {
            chunksWrittenThisRotate = true;
            ChukwaArchiveKey archiveKey = new ChukwaArchiveKey();
            for (Chunk chunk : chunks) {
                // Skip null entries before dereferencing the chunk; the original code
                // checked for null only after calling chunk.getDataType() etc.
                if (chunk == null) {
                    continue;
                }
                archiveKey.setTimePartition(timePeriod);
                archiveKey.setDataType(chunk.getDataType());
                archiveKey.setStreamName(chunk.getTags() + "/" + chunk.getSource() + "/" + chunk.getStreamName());
                archiveKey.setSeqId(chunk.getSeqID());
                seqFileWriter.append(archiveKey, chunk);
                // compute size for stats
                dataSize += chunk.getData().length;
            }
            long end = System.currentTimeMillis();
            if (log.isDebugEnabled()) {
                log.debug(group + "- duration=" + (end - now) + " size=" + chunks.size());
            }
        } catch (IOException e) {
            if (t != null) {
                t.stopAndLogTracer();
            }
            writeChunkRetries--;
            log.error(group + "- Could not save the chunk. ", e);
            if (writeChunkRetries < 0) {
                log.fatal(group + "- Too many IOException when trying to write a chunk, Collector is going to exit!");
                DaemonWatcher.bailout(-1);
            }
            throw new WriterException(e);
        }
    }
    if (t != null) {
        t.stopAndLogTracer();
    }
    return COMMIT_OK;
}
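
For orientation, here is a minimal caller sketch. It is not taken from the Honu source: the writer and chunks variables are assumptions, and COMMIT_OK is referenced through the Chukwa ChukwaWriter interface, where that constant is declared.

// Hypothetical caller sketch (not from Honu): hand a batch of chunks to the writer
// and treat COMMIT_OK as "accepted for append", not as "durably on disk".
try {
    ChukwaWriter.CommitStatus status = writer.add(chunks);
    if (status == ChukwaWriter.COMMIT_OK) {
        // Best effort only: the chunks were appended to the SequenceFile,
        // but may not have been flushed to disk yet (see the javadoc above).
    }
} catch (WriterException e) {
    // After too many consecutive IOExceptions the collector exits via
    // DaemonWatcher.bailout(-1), so callers should expect the process to die.
}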
Use of org.apache.hadoop.chukwa.ChukwaArchiveKey in project Honu by jboulon.
In class CmdLineConverter, method append:
public static void append(SequenceFile.Writer seqFileWriter, Chunk chunk) throws Throwable {
    ChukwaArchiveKey archiveKey = new ChukwaArchiveKey();
    archiveKey.setTimePartition(System.currentTimeMillis());
    archiveKey.setDataType(chunk.getDataType());
    archiveKey.setStreamName("CmdLineConverter");
    archiveKey.setSeqId(chunk.getSeqID());
    seqFileWriter.append(archiveKey, chunk);
}
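
For context, a minimal sketch of how a SequenceFile.Writer compatible with this append method might be opened. This is not from the Honu source: the output path, the default Configuration, and the choice of ChunkImpl as the value class are assumptions.

// Hypothetical setup sketch (not from Honu). Requires the Hadoop Configuration,
// FileSystem, Path, and SequenceFile classes plus Chukwa's ChukwaArchiveKey and
// ChunkImpl on the classpath; the path below is an assumption.
Configuration conf = new Configuration();
FileSystem fs = FileSystem.get(conf);
SequenceFile.Writer seqFileWriter = SequenceFile.createWriter(
        fs, conf, new Path("/tmp/cmdline.chukwa"),
        ChukwaArchiveKey.class, ChunkImpl.class);

append(seqFileWriter, chunk);  // chunk obtained elsewhere, e.g. built by CmdLineConverter
seqFileWriter.close();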