Use of com.google.code.or.common.glossary.Column in the project databus by LinkedIn:
the class ORListener, method generateAvroEvent.
/**
 * Populates {@code record} from the row's column list, using the avro schema held
 * in {@code vs} to pair database columns with avro fields by position.
 *
 * @param vs     versioned avro schema for the source table
 * @param cols   columns of one binlog row, in dbFieldPosition order
 * @param record generic record to populate (mutated in place)
 * @throws DatabusException if the column count does not match the schema's field
 *                          count, or if a field is of the unsupported ARRAY type
 */
private void generateAvroEvent(VersionedSchema vs, List<Column> cols, GenericRecord record) throws DatabusException {
// Get Ordered list of field by dbFieldPosition so fields line up positionally with cols
List<Schema.Field> orderedFields = SchemaHelper.getOrderedFieldsByDBFieldPosition(vs);
if (orderedFields.size() != cols.size()) {
throw new DatabusException("Mismatch in db schema vs avro schema");
}
// Build Map<avro field name, Column> by walking both lists in lock-step
Map<String, Column> avroFieldCol = new HashMap<String, Column>();
int cnt = 0;
for (Schema.Field field : orderedFields) {
avroFieldCol.put(field.name(), cols.get(cnt++));
}
for (Schema.Field field : orderedFields) {
if (field.schema().getType() == Schema.Type.ARRAY) {
throw new DatabusException("The parser cannot handle ARRAY datatypes. Found in field: " + field);
}
// field is the avro field name; databaseFieldName is the oracle field name (one-to-one mapping)
String databaseFieldName = SchemaHelper.getMetaField(field, "dbFieldName").toLowerCase();
// Guard the concatenation so no work is done when debug logging is off
if (_log.isDebugEnabled()) {
_log.debug("databaseFieldName = " + databaseFieldName);
}
// Insert the field into the generic record
insertFieldIntoRecord(avroFieldCol, record, databaseFieldName, field);
}
if (_log.isDebugEnabled()) {
_log.debug("Generic record = " + record);
}
}
Use of com.google.code.or.common.glossary.Column in the project databus by LinkedIn:
the class ORListener, method generateAvroEvent (overload taking a Schema).
/**
 * Populates {@code record} from the row's column list, pairing database columns
 * with avro fields by their {@code dbFieldPosition} meta attribute.
 *
 * @param schema avro schema for the source table
 * @param cols   columns of one binlog row, in dbFieldPosition order
 * @param record generic record to populate (mutated in place)
 * @throws DatabusException if the column count does not match the schema's field
 *                          count, or if a field is of the unsupported ARRAY type
 */
private void generateAvroEvent(Schema schema, List<Column> cols, GenericRecord record) throws DatabusException {
// Get Ordered list of field by dbFieldPosition (positions are stored as strings; compare numerically)
List<Schema.Field> orderedFields = SchemaHelper.getOrderedFieldsByMetaField(schema, "dbFieldPosition", new Comparator<String>() {
@Override
public int compare(String o1, String o2) {
// Integer.compare avoids the boxing of parseInt-into-Integer plus compareTo
return Integer.compare(Integer.parseInt(o1), Integer.parseInt(o2));
}
});
if (orderedFields.size() != cols.size()) {
throw new DatabusException("Mismatch in db schema vs avro schema");
}
// Build Map<avro field name, Column> by walking both lists in lock-step
Map<String, Column> avroFieldCol = new HashMap<String, Column>();
int cnt = 0;
for (Schema.Field field : orderedFields) {
avroFieldCol.put(field.name(), cols.get(cnt++));
}
for (Schema.Field field : orderedFields) {
if (field.schema().getType() == Schema.Type.ARRAY) {
throw new DatabusException("The parser cannot handle ARRAY datatypes. Found in field: " + field);
}
// field is the avro field name; databaseFieldName is the oracle field name (one-to-one mapping)
String databaseFieldName = SchemaHelper.getMetaField(field, "dbFieldName").toLowerCase();
// Guard the concatenation so no work is done when debug logging is off
if (_log.isDebugEnabled()) {
_log.debug("databaseFieldName = " + databaseFieldName);
}
// Insert the field into the generic record
insertFieldIntoRecord(avroFieldCol, record, databaseFieldName, field);
}
if (_log.isDebugEnabled()) {
_log.debug("Generic record = " + record);
}
}
Use of com.google.code.or.common.glossary.Column in the project databus by LinkedIn:
the class ORListener, method insertFieldIntoRecord.
/**
 * Given the following :
 * 1. A row data as a map of (avro field name, Column) data
 * 2. A generic record to populate
 * 3. dbFieldName
 * 4. avroFieldName
 *
 * The method locates the Column keyed by the avro field name ({@code avroField.name()}),
 * extracts the data as a Java Object via {@code orToAvroType}, and inserts it into the
 * generic record under the avro field name. A missing Column is stored as avro null.
 *
 * NOTE(review): despite the original doc, the lookup is by avro field name, not by
 * {@code dbFieldName} — that parameter is currently unused and is kept only for
 * interface compatibility with existing callers.
 *
 * @throws DatabusException if type conversion of the column value fails
 */
private void insertFieldIntoRecord(Map<String, Column> eventFields, GenericRecord record, String dbFieldName, Schema.Field avroField) throws DatabusException {
String f = avroField.name();
Column fieldValue = eventFields.get(f);
Object fieldValueObj = null;
try {
// Absent column => null field value; otherwise convert to the avro-compatible type
if (fieldValue != null) {
fieldValueObj = orToAvroType(fieldValue, avroField);
}
record.put(f, fieldValueObj);
} catch (DatabusException e) {
// Log with the cause so the stack trace is not lost before the rethrow
_log.error("Unable to process field: " + avroField.name(), e);
throw e;
}
}
Use of com.google.code.or.common.glossary.Column in the project databus by LinkedIn:
the class ORListener, method frameAvroRecord.
/**
 * Converts each binlog row in {@code rl} into an avro {@code GenericRecord} and
 * merges the resulting change entry into the current per-source transaction.
 *
 * @param tableId binlog table id; must have a prior TableMapEvent recorded
 * @param bh      binlog event header (supplies timestamp and file position)
 * @param rl      rows carried by the event
 * @param doc     databus opcode (insert/update/delete) to tag each entry with
 */
private void frameAvroRecord(long tableId, BinlogEventV4Header bh, List<Row> rl, final DbusOpcode doc) {
try {
// Binlog header timestamp is converted to nanoseconds for the databus event
final long timestampInNanos = bh.getTimestamp() * 1000000L;
final long scn = scn(_currFileNum, (int) bh.getPosition());
final boolean isReplicated = false;
final TableMapEvent tme = _tableMapEvents.get(tableId);
// Fail with context instead of a bare NPE if the TableMapEvent was never seen
if (tme == null) {
throw new DatabusRuntimeException("No TableMapEvent seen for tableId: " + tableId);
}
String tableName = tme.getDatabaseName().toString().toLowerCase() + "." + tme.getTableName().toString().toLowerCase();
VersionedSchema vs = _schemaRegistryService.fetchLatestVersionedSchemaBySourceName(_tableUriToSrcNameMap.get(tableName));
Schema schema = vs.getSchema();
if (_log.isDebugEnabled())
_log.debug("File Number :" + _currFileNum + ", Position :" + (int) bh.getPosition() + ", SCN =" + scn);
for (Row r : rl) {
List<Column> cl = r.getColumns();
GenericRecord gr = new GenericData.Record(schema);
generateAvroEvent(vs, cl, gr);
List<KeyPair> kps = generateKeyPair(gr, vs);
DbChangeEntry db = new DbChangeEntry(scn, timestampInNanos, gr, doc, isReplicated, schema, kps);
_transaction.getPerSourceTransaction(_tableUriToSrcIdMap.get(tableName)).mergeDbChangeEntrySet(db);
}
} catch (NoSuchSchemaException ne) {
// Wrap checked exceptions as unchecked; callers treat these as fatal
throw new DatabusRuntimeException(ne);
} catch (DatabusException de) {
throw new DatabusRuntimeException(de);
}
}
Aggregations