
Example 1 with Chunk

use of org.apache.hadoop.chukwa.Chunk in project Honu by jboulon.

the class LockFreeWriter method run.

public void run() {
    List<Chunk> chunks = new LinkedList<Chunk>();
    Chunk chunk = null;
    long now = 0;
    while ((isRunning || !chunks.isEmpty()) && (System.currentTimeMillis() < timeOut)) {
        try {
            now = System.currentTimeMillis();
            // Don't rotate if we are not running
            if (isRunning && (now >= nextRotate)) {
                rotate();
            }
            if (System.currentTimeMillis() >= nextTimePeriodComputation) {
                computeTimePeriod();
            }
            // Batch work: block briefly for the first chunk, then drain up to 10 more
            if (chunks.isEmpty()) {
                chunk = this.ChunkQueue.poll(1000, TimeUnit.MILLISECONDS);
                if (chunk == null) {
                    continue;
                }
                chunks.add(chunk);
                this.ChunkQueue.drainTo(chunks, 10);
            }
            add(chunks);
            chunks.clear();
        } catch (InterruptedException e) {
            isRunning = false;
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    log.info(group + "- Shutdown request exit loop ..., ChunkQueue.size at exit time: " + this.ChunkQueue.size());
    try {
        this.internalClose();
        log.info(group + "- Shutdown request internalClose done ...");
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        isStopped = true;
    }
}
Also used : Chunk(org.apache.hadoop.chukwa.Chunk) LinkedList(java.util.LinkedList) WriterException(org.apache.hadoop.chukwa.datacollection.writer.WriterException) IOException(java.io.IOException) UnknownHostException(java.net.UnknownHostException)
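
The loop above batches work by blocking on poll for the first chunk and then draining up to ten more in one call. That idiom is easy to exercise on its own; below is a minimal, self-contained sketch using a plain LinkedBlockingQueue (the class name, queue contents and sizes are illustrative, not taken from Honu).

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;

public class BatchPollSketch {
    public static void main(String[] args) throws InterruptedException {
        LinkedBlockingQueue<String> queue = new LinkedBlockingQueue<String>();
        for (int i = 0; i < 25; i++) {
            queue.put("event-" + i);
        }
        List<String> batch = new ArrayList<String>();
        // Block up to one second for the first element, as the writer loop does.
        String first = queue.poll(1000, TimeUnit.MILLISECONDS);
        if (first != null) {
            batch.add(first);
            // Grab up to 10 more elements without blocking.
            queue.drainTo(batch, 10);
        }
        // With 25 queued events this prints 11: the polled element plus 10 drained.
        System.out.println("batch size: " + batch.size());
        batch.clear();
    }
}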

Example 2 with Chunk

use of org.apache.hadoop.chukwa.Chunk in project Honu by jboulon.

the class LockFreeWriter method add.

/**
	 *  Best effort, there's no guarantee that chunks 
	 *  have really been written to disk
	 */
public CommitStatus add(List<Chunk> chunks) throws WriterException {
    Tracer t = Tracer.startNewTracer("honu.server." + group + ".addToList");
    long now = System.currentTimeMillis();
    if (chunks != null) {
        try {
            chunksWrittenThisRotate = true;
            ChukwaArchiveKey archiveKey = new ChukwaArchiveKey();
            for (Chunk chunk : chunks) {
                // Skip null entries before touching the chunk
                if (chunk == null) {
                    continue;
                }
                archiveKey.setTimePartition(timePeriod);
                archiveKey.setDataType(chunk.getDataType());
                archiveKey.setStreamName(chunk.getTags() + "/" + chunk.getSource() + "/" + chunk.getStreamName());
                archiveKey.setSeqId(chunk.getSeqID());
                seqFileWriter.append(archiveKey, chunk);
                // compute size for stats
                dataSize += chunk.getData().length;
            }
            long end = System.currentTimeMillis();
            if (log.isDebugEnabled()) {
                log.debug(group + "- duration=" + (end - now) + " size=" + chunks.size());
            }
        } catch (IOException e) {
            if (t != null) {
                t.stopAndLogTracer();
            }
            writeChunkRetries--;
            log.error(group + "- Could not save the chunk. ", e);
            if (writeChunkRetries < 0) {
                log.fatal(group + "- Too many IOException when trying to write a chunk, Collector is going to exit!");
                DaemonWatcher.bailout(-1);
            }
            throw new WriterException(e);
        }
    }
    if (t != null) {
        t.stopAndLogTracer();
    }
    return COMMIT_OK;
}
Also used : ChukwaArchiveKey(org.apache.hadoop.chukwa.ChukwaArchiveKey) Tracer(org.honu.util.Tracer) IOException(java.io.IOException) Chunk(org.apache.hadoop.chukwa.Chunk) WriterException(org.apache.hadoop.chukwa.datacollection.writer.WriterException)
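
Because add() is best effort, a COMMIT_OK return only means the chunks were handed to the sequence-file writer, not that they reached disk. A hypothetical caller could look like the sketch below; the LockFreeWriter wiring, its package import and the requeue policy are assumptions, not Honu code.

import java.util.List;
import org.apache.hadoop.chukwa.Chunk;
import org.apache.hadoop.chukwa.datacollection.writer.WriterException;

public class AddCallerSketch {
    // 'writer' is assumed to be an already configured LockFreeWriter instance.
    static void flushBatch(LockFreeWriter writer, List<Chunk> batch) {
        try {
            // Best effort: a successful return does not guarantee the chunks are on disk.
            writer.add(batch);
        } catch (WriterException e) {
            // The writer has already counted this failure against its retry
            // budget; the caller decides whether to requeue or drop the batch.
        } finally {
            batch.clear();
        }
    }
}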

Example 3 with Chunk

use of org.apache.hadoop.chukwa.Chunk in project Honu by jboulon.

the class CmdLineConverter method getChunk.

public static Chunk getChunk(ChunkBuilder cb, String dataType) {
    Chunk c = cb.getChunk();
    c.setApplication("CmdLineConverter");
    c.setDataType(dataType);
    c.setSeqID(System.currentTimeMillis());
    c.setSource(localHostAddr);
    return c;
}
Also used : Chunk(org.apache.hadoop.chukwa.Chunk)
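
A hypothetical caller of the helper above would first feed records into a ChunkBuilder and then let getChunk stamp the metadata. In the sketch below the record contents and data type are made up for illustration, and CmdLineConverter is assumed to be importable from the Honu sources.

import org.apache.hadoop.chukwa.Chunk;
import org.apache.hadoop.chukwa.ChunkBuilder;

public class GetChunkSketch {
    public static void main(String[] args) {
        // Accumulate a couple of records, then stamp them into a single Chunk.
        ChunkBuilder cb = new ChunkBuilder();
        cb.addRecord("first log line".getBytes());
        cb.addRecord("second log line".getBytes());
        Chunk chunk = CmdLineConverter.getChunk(cb, "CmdLineDemo");
        // getChunk has set the application, data type, sequence id and source.
        System.out.println(chunk.getDataType() + " seqId=" + chunk.getSeqID());
    }
}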

Example 4 with Chunk

use of org.apache.hadoop.chukwa.Chunk in project Honu by jboulon.

the class ThriftCollectorLockFreeImpl method process.

public Result process(TChunk tChunk) throws TException {
    // Stop adding chunks if the collector is not running
    if (!isRunning) {
        Log.warn("Rejecting some incoming traffic!");
        Result result = new Result();
        result.setMessage("Shutting down");
        result.setResultCode(ResultCode.TRY_LATER);
        return result;
    }
    // If there are no log events, return OK
    if (tChunk.getLogEventsSize() == 0) {
        Result result = new Result();
        result.setMessage("" + tChunk.getSeqId());
        result.setResultCode(ResultCode.OK);
        return result;
    }
    Tracer t = Tracer.startNewTracer("honu.server.processChunk");
    //this.counters.get(chunkCountField).incrementAndGet();
    ChunkBuilder cb = new ChunkBuilder();
    List<String> logEvents = tChunk.getLogEvents();
    for (String logEvent : logEvents) {
        cb.addRecord(logEvent.getBytes());
    }
    Chunk c = cb.getChunk();
    c.setApplication(tChunk.getApplication());
    c.setDataType(tChunk.getDataType());
    c.setSeqID(tChunk.getSeqId());
    c.setSource(tChunk.getSource());
    c.setTags(tChunk.getTags());
    if (isDebug) {
        System.out.println("\n\t ===============");
        System.out.println("tChunk.getApplication() :" + tChunk.getApplication());
        System.out.println("tChunk.getDataType() :" + tChunk.getDataType());
        System.out.println("tChunk.getSeqId() :" + tChunk.getSeqId());
        System.out.println("tChunk.getSource() :" + tChunk.getSource());
        System.out.println("tChunk.getStreamName() :" + tChunk.getStreamName());
        System.out.println("tChunk.getTags() :" + tChunk.getTags());
        System.out.println("c.getApplication() :" + c.getApplication());
        System.out.println("c.getDataType() :" + c.getDataType());
        System.out.println("c.getSeqID() :" + c.getSeqID());
        System.out.println("c.getSource() :" + c.getSource());
        System.out.println("c.getTags() :" + c.getTags());
        System.out.println("c.getData()" + new String(c.getData()));
    }
    boolean addResult = false;
    try {
        addResult = chunkQueue.offer(c, 2000, TimeUnit.MILLISECONDS);
    } catch (OutOfMemoryError ex) {
        ex.printStackTrace();
        DaemonWatcher.bailout(-1);
    } catch (Throwable e) {
        e.printStackTrace();
        addResult = false;
    }
    Result result = new Result();
    if (addResult) {
        try {
            Counter.increment("honu.server.chunkCount");
            Counter.increment("honu.server.logCount", logEvents.size());
            Counter.increment("honu.server." + tChunk.getApplication() + ".chunkCount");
            Counter.increment("honu.server." + tChunk.getApplication() + ".logCount", logEvents.size());
            (new Tracer("honu.server.chunkSize [messages, not msec]", logEvents.size())).logTracer();
            (new Tracer("honu.server." + tChunk.getApplication() + ".chunkSize [messages, not msec]", logEvents.size())).logTracer();
        } catch (Exception ignored) {
        }
        result.setMessage("" + tChunk.getSeqId());
        result.setResultCode(ResultCode.OK);
    } else {
        try {
            Counter.increment("honu.server.tryLater");
            Counter.increment("honu.server." + tChunk.getApplication() + ".tryLater");
        } catch (Exception ignored) {
        }
        result.setMessage("" + tChunk.getSeqId());
        result.setResultCode(ResultCode.TRY_LATER);
    }
    if (t != null) {
        t.stopAndLogTracer();
    }
    return result;
}
Also used : Tracer(org.honu.util.Tracer) ChunkBuilder(org.apache.hadoop.chukwa.ChunkBuilder) TChunk(org.honu.thrift.TChunk) Chunk(org.apache.hadoop.chukwa.Chunk) TException(org.apache.thrift.TException) Result(org.honu.thrift.Result)
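
On the client side, the TChunk handed to process would be populated with the setter counterparts of the Thrift-generated getters used above. The sketch below assumes those setter names, an already constructed handler instance and illustrative values; none of it is taken from Honu.

import java.util.Arrays;
import org.apache.thrift.TException;
import org.honu.thrift.Result;
import org.honu.thrift.ResultCode;
import org.honu.thrift.TChunk;

public class ProcessCallerSketch {
    // 'collector' stands in for whatever exposes process(TChunk) to the client.
    static Result send(ThriftCollectorLockFreeImpl collector) throws TException {
        TChunk tChunk = new TChunk();
        tChunk.setApplication("demo-app");
        tChunk.setDataType("AppLog");
        tChunk.setSeqId(System.currentTimeMillis());
        tChunk.setSource("host-01");
        tChunk.setTags("cluster=\"demo\"");
        tChunk.setLogEvents(Arrays.asList("line one", "line two"));
        Result result = collector.process(tChunk);
        if (result.getResultCode() == ResultCode.TRY_LATER) {
            // The collector queue was full or it is shutting down: back off and resend.
        }
        return result;
    }
}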

Aggregations

Chunk (org.apache.hadoop.chukwa.Chunk): 4 usages
IOException (java.io.IOException): 2 usages
WriterException (org.apache.hadoop.chukwa.datacollection.writer.WriterException): 2 usages
Tracer (org.honu.util.Tracer): 2 usages
UnknownHostException (java.net.UnknownHostException): 1 usage
LinkedList (java.util.LinkedList): 1 usage
ChukwaArchiveKey (org.apache.hadoop.chukwa.ChukwaArchiveKey): 1 usage
ChunkBuilder (org.apache.hadoop.chukwa.ChunkBuilder): 1 usage
TException (org.apache.thrift.TException): 1 usage
Result (org.honu.thrift.Result): 1 usage
TChunk (org.honu.thrift.TChunk): 1 usage