Example usage of com.linkedin.databus2.producers.EventCreationException in the LinkedIn databus project: method putOracleRecord of class OracleAvroGenericEventFactory.
/**
 * Populates an Avro record from an Oracle STRUCT by copying each struct attribute
 * into the corresponding Avro field.
 *
 * @param fieldRecord the Avro record to populate
 * @param fieldSchema the schema of the Avro field (must be a record schema)
 * @param dbFieldValue the Oracle STRUCT read from the result set (must not be null)
 * @throws EventCreationException if the field counts disagree or a STRUCT-to-Avro
 *         conversion fails
 */
private void putOracleRecord(GenericRecord fieldRecord, Schema fieldSchema, Struct dbFieldValue) throws EventCreationException {
    assert fieldSchema.getType() == Type.RECORD;
    assert null != dbFieldValue;
    try {
        List<Field> avroFields = fieldSchema.getFields();
        Object[] structAttributes = dbFieldValue.getAttributes();
        // The schema and the STRUCT must describe the same number of fields,
        // otherwise the positional copy below would be meaningless.
        if (avroFields.size() != structAttributes.length) {
            throw new EventCreationException("Avro field number mismatch: avro schema field# =" + avroFields.size() + " ; struct " + dbFieldValue.getSQLTypeName() + " field# = " + structAttributes.length);
        }
        for (Field avroField : avroFields) {
            processRecordField(fieldRecord, avroField, structAttributes);
        }
    } catch (SQLException e) {
        throw new EventCreationException("creation of field " + fieldSchema.getFullName(), e);
    }
}
Example usage of com.linkedin.databus2.producers.EventCreationException in the LinkedIn databus project: method putArray of class OracleAvroGenericEventFactory.
/**
 * Converts a JDBC {@code Array} into an Avro {@code GenericArray} of records and
 * stores it in {@code record} under {@code arrayFieldName}.
 *
 * <p>Oracle returns each array element's structure in the second column of the
 * array's ResultSet; each element is expected to be a STRUCT matching the
 * array's element schema.
 *
 * @param record the Avro record to store the array into
 * @param arrayFieldName name of the Avro field that receives the array
 * @param schema the Avro schema of the field (must be an ARRAY schema)
 * @param array the JDBC array read from the result set; may be null (SQL NULL),
 *        in which case an empty Avro array is stored
 * @throws EventCreationException if the schema is not an array type or reading
 *         the JDBC array fails
 */
private void putArray(GenericRecord record, String arrayFieldName, Schema schema, Array array) throws EventCreationException {
    // Make sure this is an array type
    if (schema.getType() != Type.ARRAY) {
        throw new EventCreationException("Not an array type. " + schema.getName());
    }
    Schema elementSchema = schema.getElementType();
    GenericArray<GenericRecord> avroArray = new GenericData.Array<GenericRecord>(0, schema);
    // FIX: ResultSet.getArray() returns null for a SQL NULL column; previously this
    // fell through to array.getResultSet() and threw an undocumented NPE.
    // Treat a NULL database array as an empty Avro array.
    if (null != array) {
        try {
            ResultSet arrayResultSet = array.getResultSet();
            try {
                while (arrayResultSet.next()) {
                    // Create the avro record and add it to the array
                    GenericRecord elemRecord = new GenericData.Record(elementSchema);
                    avroArray.add(elemRecord);
                    // Get the underlying structure from the database. Oracle returns the
                    // structure in the second column of the array's ResultSet
                    Struct struct = (Struct) arrayResultSet.getObject(2);
                    putOracleRecord(elemRecord, elementSchema, struct);
                }
            } finally {
                arrayResultSet.close();
            }
        } catch (SQLException e) {
            throw new EventCreationException("putArray error: " + e.getMessage(), e);
        }
    }
    record.put(arrayFieldName, avroArray);
}
Example usage of com.linkedin.databus2.producers.EventCreationException in the LinkedIn databus project: method buildGenericRecord of class OracleAvroFileEventFactory (OracleAvroGenericEventFactory).
/**
 * Builds an Avro {@code GenericRecord} from the contents of the current ResultSet row,
 * using the factory's event schema ({@code _eventSchema}) to drive the field mapping.
 *
 * @param rs the result set positioned on the row to convert; each Avro field's
 *           {@code dbFieldName} meta attribute names the database column to read
 * @return the populated Avro record
 * @throws SQLException if reading a column from the result set fails
 * @throws EventCreationException if DB-to-Avro conversion of a field fails
 */
protected GenericRecord buildGenericRecord(ResultSet rs) throws SQLException, EventCreationException {
    boolean traceEnabled = _log.isTraceEnabled();
    if (traceEnabled) {
        _log.trace("--- New Record ---");
    }
    // Initialize a new GenericData.Record from the event schema
    GenericRecord record = new GenericData.Record(_eventSchema);
    // Iterate over the array of fields defined in the Avro schema
    List<Field> fields = _eventSchema.getFields();
    for (Field field : fields) {
        // Get the Avro field type information
        String schemaFieldName = field.name();
        // This is just field.schema() if field is not a union; but if it IS one,
        // this is the schema of the first non-null type within the union:
        Schema fieldSchema = SchemaHelper.unwindUnionSchema(field);
        Type avroFieldType = fieldSchema.getType();
        if (avroFieldType == Type.ARRAY) {
            // Process as an array. Note that we're encoding to Avro's internal representation rather
            // than to Avro binary format, which is what allows us to directly encode one of the union's
            // inner types (here as well as in put()) instead of wrapping the inner type in a union.
            // (Avro's binary encoding for unions includes an additional long index value before the
            // encoding of the selected inner type.)
            putArray(record, schemaFieldName, fieldSchema, getJdbcArray(rs, fieldSchema));
        } else {
            String databaseFieldName = SchemaHelper.getMetaField(field, "dbFieldName");
            try {
                Object databaseFieldValue = rs.getObject(databaseFieldName);
                put(record, field, databaseFieldValue);
            } catch (SQLException ex) {
                // FIX: pass the exception to the logger so the stack trace and
                // SQL state are preserved in the log (previously only the message
                // line was logged and the cause was lost).
                _log.error("Failed to read column (" + databaseFieldName + ") for source (" + _sourceId + ")", ex);
                throw ex;
            }
        }
    }
    // Return the Avro record.
    return record;
}
Example usage of com.linkedin.databus2.producers.EventCreationException in the LinkedIn databus project: method getJdbcArray of class OracleAvroGenericEventFactory.
/**
 * Reads the JDBC array column named by the element schema's {@code dbFieldName}
 * meta attribute from the current ResultSet row.
 *
 * @param rs the result set positioned on the row to read
 * @param schema the Avro schema of the field (must be an ARRAY schema)
 * @return the JDBC array (may be null if the database column is SQL NULL)
 * @throws EventCreationException if the schema is not an array type, the
 *         {@code dbFieldName} metadata is missing, or the JDBC read fails
 */
private static Array getJdbcArray(ResultSet rs, Schema schema) throws EventCreationException {
    // FIX: previously "fails if schema isn't for array type" meant an undeclared
    // Avro runtime exception from getElementType(); check explicitly and throw the
    // declared EventCreationException instead (same message style as putArray()).
    if (schema.getType() != Type.ARRAY) {
        throw new EventCreationException("Not an array type. " + schema.getName());
    }
    Schema elementSchema = schema.getElementType();
    String dbFieldName = SchemaHelper.getMetaField(elementSchema, "dbFieldName");
    if (dbFieldName == null) {
        throw new EventCreationException("array field is missing required metadata dbFieldName. " + schema.getName());
    }
    Array array;
    try {
        array = rs.getArray(dbFieldName);
    } catch (SQLException e) {
        throw new EventCreationException("unable to read array field: " + dbFieldName + ": " + e.getMessage(), e);
    }
    return array;
}
Example usage of com.linkedin.databus2.producers.EventCreationException in the LinkedIn databus project: method readEventsFromAllSources of class BootstrapAvroFileEventReader.
/**
 * Seeds the bootstrap event buffer from the configured Avro input directories,
 * one monitored source at a time, and reports a summary of the cycle.
 *
 * <p>On any failure the buffer is signalled with {@code ERROR_CODE}; on success
 * it is signalled with {@code END_OF_FILE}. Each source additionally gets an
 * {@code END_OF_SOURCE} marker (seeding is done for one schema at a time).
 *
 * @param sinceSCN starting SCN (unused by this seeding implementation)
 * @return a summary of the read cycle, including the minimum seed SCN observed
 */
@Override
public ReadEventCycleSummary readEventsFromAllSources(long sinceSCN) throws DatabusException, EventCreationException, UnsupportedKeyException {
    List<EventReaderSummary> perSourceSummaries = new ArrayList<EventReaderSummary>();
    boolean failed = false;
    long cycleStartMs = System.currentTimeMillis();
    long endScn = -1;
    long minScn = Long.MAX_VALUE;
    try {
        for (OracleTriggerMonitoredSourceInfo src : _sources) {
            endScn = _config.getSeedWindowSCNMap().get(src.getEventView());
            minScn = Math.min(endScn, minScn);
            LOG.info("Bootstrapping " + src.getEventView());
            _bootstrapEventBuffer.start(endScn);
            File inputDir = new File(_config.getAvroSeedInputDirMap().get(src.getEventView()));
            EventReaderSummary srcSummary = readEventsFromHadoopFiles(src, inputDir, endScn);
            // Script assumes seeding is done for one schema at a time
            _bootstrapEventBuffer.endEvents(BootstrapEventBuffer.END_OF_SOURCE, endScn, null);
            perSourceSummaries.add(srcSummary);
        }
    } catch (Exception ex) {
        failed = true;
        throw new DatabusException(ex);
    } finally {
        // Notify writer that I am done
        if (failed) {
            _bootstrapEventBuffer.endEvents(BootstrapEventBuffer.ERROR_CODE, endScn, null);
            LOG.error("Seeder stopping unexpectedly !!");
        } else {
            _bootstrapEventBuffer.endEvents(BootstrapEventBuffer.END_OF_FILE, endScn, null);
            LOG.info("Completed Seeding !!");
        }
    }
    LOG.info("Start SCN :" + minScn);
    long cycleEndMs = System.currentTimeMillis();
    return new ReadEventCycleSummary("seeder", perSourceSummaries, minScn, (cycleEndMs - cycleStartMs));
}
Aggregations