use of com.linkedin.databus2.producers.ds.Transaction in project databus by linkedin.
the class ORListener method run.
@Override
public void run() {
  List<BinlogEventV4> eventList = new ArrayList<BinlogEventV4>();
  BinlogEventV4 event;
  while (!isShutdownRequested()) {
    if (isPauseRequested()) {
      LOG.info("Pause requested for ORListener. Pausing !!");
      signalPause();
      LOG.info("Pausing. Waiting for resume command");
      try {
        awaitUnPauseRequest();
      } catch (InterruptedException e) {
        _log.info("Interrupted !!");
      }
      LOG.info("Resuming ORListener !!");
      signalResumed();
      LOG.info("ORListener resumed !!");
    }
    eventList.clear();
    int eventNumber = _binlogEventQueue.drainTo(eventList);
    if (eventNumber == 0) {
      try {
        event = _binlogEventQueue.poll(_queueTimeoutMs, TimeUnit.MILLISECONDS);
        if (event != null) {
          eventList.add(event);
          eventNumber = eventList.size();
        }
      } catch (InterruptedException e) {
        _log.info("Interrupted when polling from _binlogEventQueue!!");
      }
    }
    for (int i = 0; i < eventNumber; i++) {
      event = eventList.get(i);
      if (event == null) {
        _log.error("Received null event");
        continue;
      }
      try {
        // Beginning of Txn
        if (event instanceof QueryEvent) {
          QueryEvent qe = (QueryEvent) event;
          String sql = qe.getSql().toString();
          if ("BEGIN".equalsIgnoreCase(sql)) {
            _isBeginTxnSeen = true;
            _log.info("BEGIN sql: " + sql);
            _currTxnSizeInBytes = event.getHeader().getEventLength();
            startXtion(qe);
            continue;
          }
        } else if (event instanceof RotateEvent) {
          RotateEvent re = (RotateEvent) event;
          String fileName = re.getBinlogFileName().toString();
          _log.info("File Rotated : FileName :" + fileName + ", _binlogFilePrefix :" + _binlogFilePrefix);
          String fileNumStr = fileName.substring(fileName.lastIndexOf(_binlogFilePrefix) + _binlogFilePrefix.length() + 1);
          _currFileNum = Integer.parseInt(fileNumStr);
          _tableMapEvents.clear();
          continue;
        }
        if (!_isBeginTxnSeen) {
          if (_log.isDebugEnabled()) {
            _log.debug("Skipping event (" + event + ") as this is before the start of the first transaction");
          }
          continue;
        }
        _currTxnSizeInBytes += event.getHeader().getEventLength();
        if (event instanceof QueryEvent) {
          QueryEvent qe = (QueryEvent) event;
          String sql = qe.getSql().toString();
          if ("COMMIT".equalsIgnoreCase(sql)) {
            _log.debug("COMMIT sql: " + sql);
            endXtion(qe);
            continue;
          } else if ("ROLLBACK".equalsIgnoreCase(sql)) {
            _log.debug("ROLLBACK sql: " + sql);
            rollbackXtion(qe);
            continue;
          } else {
            // Ignore DDL statements for now
            _log.debug("Likely DDL statement sql: " + sql);
            continue;
          }
        } else if (event instanceof XidEvent) {
          XidEvent xe = (XidEvent) event;
          long xid = xe.getXid();
          _log.debug("Treating XID event with xid = " + xid + " as commit for the transaction");
          endXtion(xe);
          continue;
        } else if (event instanceof FormatDescriptionEvent) {
          // We don't need to process this event
          _log.info("received FormatDescriptionEvent event");
          continue;
        } else if (event instanceof WriteRowsEvent) {
          WriteRowsEvent wre = (WriteRowsEvent) event;
          insertRows(wre);
        } else if (event instanceof WriteRowsEventV2) {
          WriteRowsEventV2 wre = (WriteRowsEventV2) event;
          insertRows(wre);
        } else if (event instanceof UpdateRowsEvent) {
          UpdateRowsEvent ure = (UpdateRowsEvent) event;
          updateRows(ure);
        } else if (event instanceof UpdateRowsEventV2) {
          UpdateRowsEventV2 ure = (UpdateRowsEventV2) event;
          updateRows(ure);
        } else if (event instanceof DeleteRowsEventV2) {
          DeleteRowsEventV2 dre = (DeleteRowsEventV2) event;
          deleteRows(dre);
        } else if (event instanceof DeleteRowsEvent) {
          DeleteRowsEvent dre = (DeleteRowsEvent) event;
          deleteRows(dre);
        } else if (event instanceof TableMapEvent) {
          TableMapEvent tme = (TableMapEvent) event;
          processTableMapEvent(tme);
        } else {
          _log.warn("Skipping !! Unknown OR event e: " + event);
          continue;
        }
        if (_log.isDebugEnabled()) {
          _log.debug("e: " + event);
        }
      } catch (Exception e) {
        _log.error("failed to process binlog event, event: " + event, e);
      }
    }
  }
  _log.info("ORListener Thread done");
  doShutdownNotify();
}
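
The loop above drains _binlogEventQueue in batches and, when the queue is empty, falls back to a timed poll so pause and shutdown requests are re-checked regularly. Below is a minimal, self-contained sketch of that drain-then-poll pattern using only java.util.concurrent; the queue contents, loop bound, and 100 ms timeout are illustrative stand-ins, not databus code.

// Minimal sketch of the drain-then-poll consumption pattern used by ORListener.run().
// The queue, event strings, and timeout are hypothetical, not databus classes.
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.TimeUnit;

public class DrainThenPollSketch {
  public static void main(String[] args) throws InterruptedException {
    BlockingQueue<String> queue = new ArrayBlockingQueue<String>(16);
    queue.put("event-1");
    queue.put("event-2");
    List<String> batch = new ArrayList<String>();
    for (int rounds = 0; rounds < 3; rounds++) {
      batch.clear();
      // First take everything that is already queued, without blocking.
      int drained = queue.drainTo(batch);
      if (drained == 0) {
        // Nothing buffered: block for a bounded time so control flags stay responsive.
        String e = queue.poll(100, TimeUnit.MILLISECONDS);
        if (e != null) {
          batch.add(e);
        }
      }
      for (String e : batch) {
        System.out.println("processing " + e);
      }
    }
  }
}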
use of com.linkedin.databus2.producers.ds.Transaction in project databus by linkedin.
the class ORListener method startXtion.
private void startXtion(QueryEvent e) {
  _currTxnStartReadTimestamp = System.nanoTime();
  _log.info("startXtion" + e);
  if (_transaction == null) {
    _transaction = new Transaction();
  } else {
    throw new DatabusRuntimeException("Got startXtion without an endXtion for previous transaction");
  }
}
use of com.linkedin.databus2.producers.ds.Transaction in project databus by linkedin.
the class TrailFilePositionSetter method findTxnScn.
private FilePositionResult findTxnScn(ConcurrentAppendableCompositeFileInputStream stream, long expScn, TransactionSCNFinderCallback callback) throws IOException {
  FilePositionResult result = null;
  ScnTxnPos pos = null;
  callback.begin(expScn);
  byte[] bArr = new byte[4 * 1024];
  File prevFile = null;
  File currFile = null;
  long currPosition = -1;
  List<String> lines = new ArrayList<String>();
  List<Integer> lineEndPos = new ArrayList<Integer>();
  String prevLine = null;
  boolean done = false;
  while (!done) {
    prevFile = currFile;
    int numBytes = stream.read(bArr);
    if (numBytes <= 0)
      break;
    currFile = stream.getCurrentFile();
    currPosition = stream.getCurrentPosition();
    boolean spanFile = false;
    int endOffset = 0;
    if ((currFile != null) && (prevFile != null) && (!currFile.equals(prevFile))) {
      // Crossed a file boundary while reading this block. Track the endOffset where the file ends.
      spanFile = true;
      endOffset = (int) (numBytes - currPosition);
    }
    prevLine = splitBytesByNewLines(bArr, numBytes, spanFile, endOffset, prevLine, lines, lineEndPos);
    // On the first read, call the beginFileProcessing callback
    if (prevFile == null)
      callback.beginFileProcessing(currFile.getName());
    int currOffset = 0;
    for (int i = 0; i < lines.size(); i++) {
      String l = lines.get(i);
      // newLineLen can be one of: (-1) file boundary, (1) "\n" or "\r", (2) "\r\n"
      int newLineLen = lineEndPos.get(i) - currOffset - l.length();
      try {
        done = callback.processLine(l, newLineLen);
      } catch (DatabusException e) {
        _log.error("Got Exception when processing line (" + l + ").", e);
        result = FilePositionResult.createErrorResult(e);
        return result;
      }
      if (done)
        break;
      // A file boundary fell on this line
      if (lineEndPos.get(i) == -1) {
        callback.endFileProcessing(prevFile.getName());
        callback.beginFileProcessing(currFile.getName());
      }
      currOffset = ((lineEndPos.get(i) < 0) ? currOffset + l.length() : lineEndPos.get(i));
    }
    lines.clear();
    lineEndPos.clear();
  }
  // There could be a last transaction that becomes complete once prevLine is added.
  if (!done && (prevLine != null)) {
    try {
      callback.processLine(prevLine, NO_NEWLINE_LEN);
    } catch (DatabusException e) {
      if (_log.isDebugEnabled())
        _log.debug("Got Exception when processing line (" + prevLine + ").", e);
      result = FilePositionResult.createErrorResult(e);
      return result;
    }
  }
  pos = callback.getTxnPos();
  if (callback.getNumTxnsSeen() <= 0) {
    result = FilePositionResult.createNoTxnsFoundResult();
  } else if (expScn == USE_LATEST_SCN) {
    result = FilePositionResult.createFoundResult(pos);
  } else if (expScn == USE_EARLIEST_SCN) {
    result = FilePositionResult.createFoundResult(pos);
  } else {
    // Normal SCN
    if (pos.getMaxScn() == expScn)
      result = FilePositionResult.createFoundResult(pos);
    else
      result = FilePositionResult.createExactScnNotFoundResult(pos);
  }
  return result;
}
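
findTxnScn reads the composite trail stream in fixed-size blocks and threads the unterminated tail of each block (prevLine) into the next call of splitBytesByNewLines, so lines that straddle block or file boundaries are reassembled before the callback sees them. The sketch below shows the same carry-the-partial-line idea with plain java.io; the input text, the deliberately tiny block size, and the token format are made up for illustration.

// Self-contained sketch of carrying a partial trailing line across fixed-size block reads.
// Names, data, and the 8-byte block size are illustrative, not databus code.
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;

public class BlockLineSplitSketch {
  public static void main(String[] args) throws IOException {
    byte[] data = "scn=100\nscn=200\nscn=300\n".getBytes(StandardCharsets.UTF_8);
    InputStream in = new ByteArrayInputStream(data);
    byte[] block = new byte[8];           // deliberately small so lines span reads
    String prevLine = "";                 // unterminated tail carried to the next block
    int numBytes;
    while ((numBytes = in.read(block)) > 0) {
      String chunk = prevLine + new String(block, 0, numBytes, StandardCharsets.UTF_8);
      int start = 0;
      int nl;
      while ((nl = chunk.indexOf('\n', start)) >= 0) {
        System.out.println("line: " + chunk.substring(start, nl));
        start = nl + 1;
      }
      prevLine = chunk.substring(start);  // keep whatever did not end in a newline
    }
    if (!prevLine.isEmpty()) {
      System.out.println("trailing line without newline: " + prevLine);
    }
  }
}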
use of com.linkedin.databus2.producers.ds.Transaction in project databus by linkedin.
the class GoldenGateEventProducer method addEventToBuffer.
/**
 * @param dbUpdates The dbUpdates present in the current transaction
 * @param ti The meta information about the transaction. (See the TransactionInfo class for more details.)
 * @throws DatabusException
 * @throws UnsupportedKeyException
 */
protected void addEventToBuffer(List<TransactionState.PerSourceTransactionalUpdate> dbUpdates, TransactionInfo ti) throws DatabusException, UnsupportedKeyException {
  if (dbUpdates.size() == 0)
    throw new DatabusException("Cannot handle empty dbUpdates");
  long scn = ti.getScn();
  long timestamp = ti.getTransactionTimeStampNs();
  EventSourceStatistics globalStats = getSource(GLOBAL_SOURCE_ID).getStatisticsBean();
  /**
   * We skip the start scn of the relay, since we have already added an EOP for this SCN in the buffer.
   * Why is this not a problem? There are two cases:
   * 1. When we use the earliest/latest scn, there is no maxScn (we don't really have a start point), so it's OK to miss the first event.
   * 2. If it's the maxScn, then the event was already seen by the relay.
   */
  if (scn == _startPrevScn.get()) {
    _log.info("Skipping this transaction, EOP already sent for this event");
    return;
  }
  getEventBuffer().startEvents();
  int eventsInTransactionCount = 0;
  List<EventReaderSummary> summaries = new ArrayList<EventReaderSummary>();
  for (int i = 0; i < dbUpdates.size(); ++i) {
    GenericRecord record = null;
    TransactionState.PerSourceTransactionalUpdate perSourceUpdate = dbUpdates.get(i);
    short sourceId = (short) perSourceUpdate.getSourceId();
    // Prepare stats collection per source
    EventSourceStatistics perSourceStats = getSource(sourceId).getStatisticsBean();
    Iterator<DbUpdateState.DBUpdateImage> dbUpdateIterator = perSourceUpdate.getDbUpdatesSet().iterator();
    int eventsInDbUpdate = 0;
    long dbUpdatesEventsSize = 0;
    long startDbUpdatesMs = System.currentTimeMillis();
    // TODO verify if there is any case where we need to roll back.
    while (dbUpdateIterator.hasNext()) {
      DbUpdateState.DBUpdateImage dbUpdate = dbUpdateIterator.next();
      // Construct the Databus event key: determine the key type and construct the key
      Object keyObj = obtainKey(dbUpdate);
      DbusEventKey eventKey = new DbusEventKey(keyObj);
      // Get the logical partition id
      PartitionFunction partitionFunction = _partitionFunctionHashMap.get((int) sourceId);
      short lPartitionId = partitionFunction.getPartition(eventKey);
      record = dbUpdate.getGenericRecord();
      // Write the event to the buffer
      if (record == null)
        throw new DatabusException("Cannot write event to buffer because record = " + record);
      if (record.getSchema() == null)
        throw new DatabusException("The record does not have a schema (null schema)");
      try {
        // Collect stats on number of dbUpdates for one source
        eventsInDbUpdate++;
        // Count of all the events in the current transaction
        eventsInTransactionCount++;
        // Serialize the row
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        Encoder encoder = new BinaryEncoder(bos);
        GenericDatumWriter<GenericRecord> writer = new GenericDatumWriter<GenericRecord>(record.getSchema());
        writer.write(record, encoder);
        byte[] serializedValue = bos.toByteArray();
        // Get the md5 for the schema
        SchemaId schemaId = SchemaId.createWithMd5(dbUpdate.getSchema());
        // Determine the operation type and convert to a dbus opcode
        DbusOpcode opCode;
        if (dbUpdate.getOpType() == DbUpdateState.DBUpdateImage.OpType.INSERT || dbUpdate.getOpType() == DbUpdateState.DBUpdateImage.OpType.UPDATE) {
          opCode = DbusOpcode.UPSERT;
          if (_log.isDebugEnabled())
            _log.debug("The event with scn " + scn + " is INSERT/UPDATE");
        } else if (dbUpdate.getOpType() == DbUpdateState.DBUpdateImage.OpType.DELETE) {
          opCode = DbusOpcode.DELETE;
          if (_log.isDebugEnabled())
            _log.debug("The event with scn " + scn + " is DELETE");
        } else {
          throw new DatabusException("Unknown opcode from dbUpdate for event with scn:" + scn);
        }
        // Construct the dbusEvent info
        DbusEventInfo dbusEventInfo = new DbusEventInfo(opCode, scn, (short) _pConfig.getId(), lPartitionId, timestamp, sourceId, schemaId.getByteArray(), serializedValue, false, false);
        dbusEventInfo.setReplicated(dbUpdate.isReplicated());
        perSourceStats.addEventCycle(1, ti.getTransactionTimeRead(), serializedValue.length, scn);
        globalStats.addEventCycle(1, ti.getTransactionTimeRead(), serializedValue.length, scn);
        long tsEnd = System.currentTimeMillis();
        perSourceStats.addTimeOfLastDBAccess(tsEnd);
        globalStats.addTimeOfLastDBAccess(tsEnd);
        // Append to the event buffer
        getEventBuffer().appendEvent(eventKey, dbusEventInfo, _statsCollector);
        _rc.incrementEventCount();
        dbUpdatesEventsSize += serializedValue.length;
      } catch (IOException io) {
        perSourceStats.addError();
        globalStats.addEmptyEventCycle();
        _log.error("Cannot create byte stream payload: " + dbUpdates.get(i).getSourceId());
      }
    }
    long endDbUpdatesMs = System.currentTimeMillis();
    long dbUpdatesElapsedTimeMs = endDbUpdatesMs - startDbUpdatesMs;
    // Log event summary at the logical source level
    EventReaderSummary summary = new EventReaderSummary(sourceId, _monitoredSources.get(sourceId).getSourceName(), scn, eventsInDbUpdate, dbUpdatesEventsSize, -1L, /* Not supported */
        dbUpdatesElapsedTimeMs, timestamp, timestamp, -1L);
    if (_eventsLog.isInfoEnabled()) {
      _eventsLog.info(summary.toString());
    }
    summaries.add(summary);
    if (_log.isDebugEnabled())
      _log.debug("There are " + eventsInDbUpdate + " events seen in the current dbUpdate");
  }
  // Log event summary at the physical source level
  ReadEventCycleSummary summary = new ReadEventCycleSummary(_pConfig.getName(), summaries, scn, -1);
  if (_eventsLog.isInfoEnabled()) {
    _eventsLog.info(summary.toString());
  }
  _log.info("Writing " + eventsInTransactionCount + " events from transaction with scn: " + scn);
  if (scn <= 0)
    throw new DatabusException("Unable to write events to buffer because of negative/zero scn: " + scn);
  getEventBuffer().endEvents(scn, _statsCollector);
  _scn.set(scn);
  if (getMaxScnReaderWriter() != null) {
    try {
      getMaxScnReaderWriter().saveMaxScn(_scn.get());
    } catch (DatabusException e) {
      _log.error("Cannot save scn = " + _scn + " for physical source = " + getName(), e);
    }
  }
}
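
Inside the per-row loop, each GenericRecord is serialized to the byte[] payload with a GenericDatumWriter and a binary encoder before being wrapped in DbusEventInfo. The sketch below reproduces just that serialization step; it assumes a more recent Avro release (EncoderFactory instead of the deprecated public BinaryEncoder constructor used above) and a made-up two-field schema.

// Sketch of the per-row Avro serialization step, with a hypothetical schema and values.
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.EncoderFactory;

public class AvroRowSerializationSketch {
  public static void main(String[] args) throws IOException {
    Schema schema = new Schema.Parser().parse(
        "{\"type\":\"record\",\"name\":\"Row\",\"fields\":["
            + "{\"name\":\"id\",\"type\":\"long\"},"
            + "{\"name\":\"name\",\"type\":\"string\"}]}");
    GenericRecord record = new GenericData.Record(schema);
    record.put("id", 42L);
    record.put("name", "example");
    // Same structure as the producer: write the record through a GenericDatumWriter
    // into a byte array that becomes the event payload.
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(bos, null);
    GenericDatumWriter<GenericRecord> writer = new GenericDatumWriter<GenericRecord>(schema);
    writer.write(record, encoder);
    encoder.flush();
    byte[] serializedValue = bos.toByteArray();
    System.out.println("serialized payload size = " + serializedValue.length + " bytes");
  }
}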
use of com.linkedin.databus2.producers.ds.Transaction in project databus by linkedin.
the class GGXMLTrailTransactionFinder method processEnd.
/**
 * When the transaction end is seen, this should be called to save the SCN.
 * @throws DatabusException
 */
private void processEnd() throws DatabusException {
  if (!_beginTxnSeen) {
    _currTxnStr.setLength(0);
    return;
  }
  _maxScn = Long.valueOf(-1);
  _minScn = Long.MAX_VALUE;
  try {
    if (!_enableRegex) {
      xpathQuery();
    } else {
      regexQuery();
    }
  } catch (DatabusTrailFileParseException ex) {
    LOG.warn("empty/corrupted txn (" + ex.getMessage() + "); resetting invalid _txnPos (" + _txnPos + ") to _prevTxnPos (" + _prevTxnPos + ")");
    _txnPos.copyFrom(_prevTxnPos);
    // TODO: wire into metrics/monitoring (need accessor plus whatever lies on caller's end)
    ++_numInvalidTxnsSeen;
    return;
  }
  _txnPos.setMaxScn(_maxScn);
  _txnPos.setMinScn(_minScn);
  _txnEndSeen = true;
  _numTxnsSeen++;
  if (!_firstTxnSeen) {
    // Common case: need to try the previous trail file instead
    if ((_targetScn >= 0) && (_targetScn < _minScn))
      throw new DatabusException("SinceSCN is less than MinScn available in trail file. Requested SinceSCN is :" + _targetScn + " but found only : " + _minScn + " in Location " + _txnPos);
  }
  _firstTxnSeen = true;
  _beginTxnSeen = false;
  if (LOG.isDebugEnabled()) {
    LOG.debug("Seen Txn : " + _txnPos);
  }
}
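
processEnd delegates to xpathQuery() or regexQuery() to pull every SCN out of the buffered transaction text and records the smallest and largest in _minScn/_maxScn, falling back to the previous position when the transaction is empty or corrupted. The sketch below illustrates only that min/max bookkeeping with java.util.regex; the scn=<digits> token format is hypothetical and is not the actual GoldenGate trail syntax.

// Illustrative sketch of min/max SCN tracking over a buffered transaction string.
// The regex and sample input are made up; only the bookkeeping mirrors the code above.
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class ScnRangeSketch {
  public static void main(String[] args) {
    String currTxnStr = "op scn=105 ... op scn=101 ... op scn=110";
    Pattern scnPattern = Pattern.compile("scn=(\\d+)");
    long maxScn = -1;
    long minScn = Long.MAX_VALUE;
    Matcher m = scnPattern.matcher(currTxnStr);
    while (m.find()) {
      long scn = Long.parseLong(m.group(1));
      maxScn = Math.max(maxScn, scn);
      minScn = Math.min(minScn, scn);
    }
    if (maxScn < 0) {
      // Analogous to the DatabusTrailFileParseException path: nothing usable in this txn.
      System.out.println("empty/corrupted txn, keeping previous position");
    } else {
      System.out.println("minScn=" + minScn + " maxScn=" + maxScn);
    }
  }
}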