
Example 6 with EventCreationException

Use of com.linkedin.databus2.producers.EventCreationException in project databus by linkedin.

Class OracleAvroGenericEventFactory, method putOracleRecord.

/**
   * Copies the values of the simple-type fields of a DB STRUCT value into an Avro record
   *
   * @param  fieldRecord            the Avro record to populate
   * @param  fieldSchema            the schema of the Avro field (must be a record)
   * @param  dbFieldValue           the DB field value from the result set (cannot be null)
   * @throws EventCreationException if conversion from the STRUCT type to the Avro type failed
   */
private void putOracleRecord(GenericRecord fieldRecord, Schema fieldSchema, Struct dbFieldValue) throws EventCreationException {
    assert fieldSchema.getType() == Type.RECORD;
    assert null != dbFieldValue;
    try {
        List<Field> fields = fieldSchema.getFields();
        Object[] structAttribs = dbFieldValue.getAttributes();
        if (fields.size() != structAttribs.length)
            throw new EventCreationException("Avro field number mismatch: avro schema field# =" + fields.size() + " ; struct " + dbFieldValue.getSQLTypeName() + " field# = " + structAttribs.length);
        for (Field field : fields) {
            processRecordField(fieldRecord, field, structAttribs);
        }
    } catch (SQLException e) {
        throw new EventCreationException("creation of field " + fieldSchema.getFullName(), e);
    }
}
Also used : Field(org.apache.avro.Schema.Field) SQLException(java.sql.SQLException) EventCreationException(com.linkedin.databus2.producers.EventCreationException)
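
For orientation, here is a minimal caller sketch. It is not part of the databus source: it assumes a hypothetical helper placed inside OracleAvroGenericEventFactory (so the private putOracleRecord and the class's imports are visible), and the helper name and arguments are invented for illustration.

private GenericRecord readStructColumn(ResultSet rs, String columnName, Schema recordSchema)
        throws SQLException, EventCreationException {
    // Oracle surfaces user-defined object types as java.sql.Struct
    Struct struct = (Struct) rs.getObject(columnName);
    GenericRecord nested = new GenericData.Record(recordSchema);
    if (struct != null) {
        // recordSchema must be of Avro type RECORD, and its field count must match the STRUCT's attribute count
        putOracleRecord(nested, recordSchema, struct);
    }
    return nested;
}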

Example 7 with EventCreationException

Use of com.linkedin.databus2.producers.EventCreationException in project databus by linkedin.

Class OracleAvroGenericEventFactory, method putArray.

private void putArray(GenericRecord record, String arrayFieldName, Schema schema, Array array) throws EventCreationException {
    // Make sure this is an array type
    if (schema.getType() != Type.ARRAY) {
        throw new EventCreationException("Not an array type. " + schema.getName());
    }
    Schema elementSchema = schema.getElementType();
    GenericArray<GenericRecord> avroArray = new GenericData.Array<GenericRecord>(0, schema);
    try {
        ResultSet arrayResultSet = array.getResultSet();
        try {
            while (arrayResultSet.next()) {
                // Create the avro record and add it to the array
                GenericRecord elemRecord = new GenericData.Record(elementSchema);
                avroArray.add(elemRecord);
                // Get the underlying structure from the database. Oracle returns the structure in the
                // second column of the array's ResultSet
                Struct struct = (Struct) arrayResultSet.getObject(2);
                putOracleRecord(elemRecord, elementSchema, struct);
            }
        } finally {
            arrayResultSet.close();
        }
    } catch (SQLException e) {
        throw new EventCreationException("putArray error: " + e.getMessage(), e);
    }
    record.put(arrayFieldName, avroArray);
}
Also used : GenericArray(org.apache.avro.generic.GenericArray) Array(java.sql.Array) SQLException(java.sql.SQLException) EventCreationException(com.linkedin.databus2.producers.EventCreationException) Schema(org.apache.avro.Schema) ResultSet(java.sql.ResultSet) GenericRecord(org.apache.avro.generic.GenericRecord) Struct(java.sql.Struct)
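
A hedged usage sketch, not taken from the databus source: assuming a helper inside OracleAvroGenericEventFactory, it shows how a java.sql.Array column might be fetched and handed to putArray. The column name "ADDRESSES" and the Avro field name "addresses" are invented.

private void copyArrayColumn(GenericRecord record, ResultSet rs, Schema arraySchema)
        throws SQLException, EventCreationException {
    // The JDBC driver returns Oracle collection columns as java.sql.Array
    Array jdbcArray = rs.getArray("ADDRESSES");
    if (jdbcArray != null) {
        // arraySchema.getType() must be ARRAY, or putArray throws EventCreationException
        putArray(record, "addresses", arraySchema, jdbcArray);
    }
}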

Example 8 with EventCreationException

Use of com.linkedin.databus2.producers.EventCreationException in project databus by linkedin.

Class OracleAvroGenericEventFactory, method buildGenericRecord.

/**
   * Builds a GenericRecord from the contents of the current ResultSet row.
   *
   * @param  rs  the result set, positioned at the row to convert
   * @return the populated Avro record
   * @throws SQLException           if reading a column from the result set fails
   * @throws EventCreationException if a field value cannot be converted to its Avro type
   */
protected GenericRecord buildGenericRecord(ResultSet rs) throws SQLException, EventCreationException {
    boolean traceEnabled = _log.isTraceEnabled();
    if (traceEnabled) {
        _log.trace("--- New Record ---");
    }
    // Initialize a new GenericData.Record from the event schema
    GenericRecord record = new GenericData.Record(_eventSchema);
    // Iterate over the array of fields defined in the Avro schema
    List<Field> fields = _eventSchema.getFields();
    for (Field field : fields) {
        // Get the Avro field type information
        String schemaFieldName = field.name();
        // This is just field.schema() if field is not a union; but if it IS one,
        // this is the schema of the first non-null type within the union:
        Schema fieldSchema = SchemaHelper.unwindUnionSchema(field);
        Type avroFieldType = fieldSchema.getType();
        if (avroFieldType == Type.ARRAY) {
            // Process as an array.  Note that we're encoding to Avro's internal representation rather
            // than to Avro binary format, which is what allows us to directly encode one of the union's
            // inner types (here as well as in put()) instead of wrapping the inner type in a union.
            // (Avro's binary encoding for unions includes an additional long index value before the
            // encoding of the selected inner type.)
            putArray(record, schemaFieldName, fieldSchema, getJdbcArray(rs, fieldSchema));
        } else {
            String databaseFieldName = SchemaHelper.getMetaField(field, "dbFieldName");
            try {
                Object databaseFieldValue = rs.getObject(databaseFieldName);
                put(record, field, databaseFieldValue);
            } catch (SQLException ex) {
                _log.error("Failed to read column (" + databaseFieldName + ") for source (" + _sourceId + ")");
                throw ex;
            }
        }
    }
    // Return the Avro record.
    return record;
}
Also used : Field(org.apache.avro.Schema.Field) Type(org.apache.avro.Schema.Type) SourceType(com.linkedin.databus2.relay.config.ReplicationBitSetterStaticConfig.SourceType) SQLException(java.sql.SQLException) Schema(org.apache.avro.Schema) GenericRecord(org.apache.avro.generic.GenericRecord)
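
A sketch of a driver loop that could sit around buildGenericRecord. It is an assumption, not databus code: the method name, the query text, and the java.sql.Connection/PreparedStatement plumbing (which would need their own imports) are supplied here only to show the call pattern.

protected List<GenericRecord> buildRecordsForQuery(Connection con, String eventQuery)
        throws SQLException, EventCreationException {
    List<GenericRecord> records = new ArrayList<GenericRecord>();
    PreparedStatement stmt = con.prepareStatement(eventQuery);
    try {
        ResultSet rs = stmt.executeQuery();
        try {
            while (rs.next()) {
                // One Avro record per database row
                records.add(buildGenericRecord(rs));
            }
        } finally {
            rs.close();
        }
    } finally {
        stmt.close();
    }
    return records;
}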

Example 9 with EventCreationException

Use of com.linkedin.databus2.producers.EventCreationException in project databus by linkedin.

Class OracleAvroGenericEventFactory, method getJdbcArray.

private static Array getJdbcArray(ResultSet rs, Schema schema) throws EventCreationException {
    // fails if schema isn't for array type
    Schema elementSchema = schema.getElementType();
    String dbFieldName = SchemaHelper.getMetaField(elementSchema, "dbFieldName");
    if (dbFieldName == null) {
        throw new EventCreationException("array field is missing required metadata dbFieldName. " + schema.getName());
    }
    Array array;
    try {
        array = rs.getArray(dbFieldName);
    } catch (SQLException e) {
        throw new EventCreationException("unable to read array field: " + dbFieldName + ": " + e.getMessage(), e);
    }
    return array;
}
Also used : GenericArray(org.apache.avro.generic.GenericArray) Array(java.sql.Array) SQLException(java.sql.SQLException) EventCreationException(com.linkedin.databus2.producers.EventCreationException) Schema(org.apache.avro.Schema)
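
To show what getJdbcArray expects from the schema, here is an illustrative snippet, not an authoritative one: it assumes the databus convention of packing per-field metadata into a "meta" string property of "key=value;" pairs (the source above only confirms that SchemaHelper.getMetaField reads a "dbFieldName" entry), and the type, field, and column names are invented. Schema.parse is the Avro 1.4-era entry point; newer Avro versions would use new Schema.Parser().parse(...).

// Hypothetical array schema: getJdbcArray(rs, arraySchema) would then call rs.getArray("ADDRESSES"),
// because the element record's meta attribute names the Oracle column backing the collection.
Schema arraySchema = Schema.parse(
    "{\"type\":\"array\",\"items\":{"
    + "\"type\":\"record\",\"name\":\"AddressRecord\","
    + "\"meta\":\"dbFieldName=ADDRESSES;\","
    + "\"fields\":[{\"name\":\"city\",\"type\":\"string\"}]}}");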

Example 10 with EventCreationException

Use of com.linkedin.databus2.producers.EventCreationException in project databus by linkedin.

Class BootstrapAvroFileEventReader, method readEventsFromAllSources.

@Override
public ReadEventCycleSummary readEventsFromAllSources(long sinceSCN) throws DatabusException, EventCreationException, UnsupportedKeyException {
    List<EventReaderSummary> summaries = new ArrayList<EventReaderSummary>();
    boolean error = false;
    long startTS = System.currentTimeMillis();
    long endScn = -1;
    long minScn = Long.MAX_VALUE;
    try {
        for (OracleTriggerMonitoredSourceInfo sourceInfo : _sources) {
            endScn = _config.getSeedWindowSCNMap().get(sourceInfo.getEventView());
            minScn = Math.min(endScn, minScn);
            LOG.info("Bootstrapping " + sourceInfo.getEventView());
            _bootstrapEventBuffer.start(endScn);
            String dir = _config.getAvroSeedInputDirMap().get(sourceInfo.getEventView());
            File d = new File(dir);
            EventReaderSummary summary = readEventsFromHadoopFiles(sourceInfo, d, endScn);
            // Script assumes seeding is done for one schema at a time
            _bootstrapEventBuffer.endEvents(BootstrapEventBuffer.END_OF_SOURCE, endScn, null);
            summaries.add(summary);
        }
    } catch (Exception ex) {
        error = true;
        throw new DatabusException(ex);
    } finally {
        // Notify writer that I am done
        if (error) {
            _bootstrapEventBuffer.endEvents(BootstrapEventBuffer.ERROR_CODE, endScn, null);
            LOG.error("Seeder stopping unexpectedly !!");
        } else {
            _bootstrapEventBuffer.endEvents(BootstrapEventBuffer.END_OF_FILE, endScn, null);
            LOG.info("Completed Seeding !!");
        }
    }
    LOG.info("Start SCN :" + minScn);
    long endTS = System.currentTimeMillis();
    ReadEventCycleSummary cycleSummary = new ReadEventCycleSummary("seeder", summaries, minScn, (endTS - startTS));
    return cycleSummary;
}
Also used : ReadEventCycleSummary(com.linkedin.databus2.producers.db.ReadEventCycleSummary) EventReaderSummary(com.linkedin.databus2.producers.db.EventReaderSummary) DatabusException(com.linkedin.databus2.core.DatabusException) ArrayList(java.util.ArrayList) File(java.io.File) EventCreationException(com.linkedin.databus2.producers.EventCreationException) InvalidConfigException(com.linkedin.databus.core.util.InvalidConfigException) IOException(java.io.IOException) UnsupportedKeyException(com.linkedin.databus.core.UnsupportedKeyException) OracleTriggerMonitoredSourceInfo(com.linkedin.databus2.producers.db.OracleTriggerMonitoredSourceInfo)
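
The error handling above follows a "signal exactly once in finally" pattern: the catch block only records that something failed, and the finally block tells the downstream writer whether the run ended normally or aborted. Below is a minimal, self-contained sketch of that pattern with invented names (a BlockingQueue stands in for the bootstrap event buffer); it is not databus code.

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;

public class SeedRunSketch {
    private static final String END_OF_FILE = "END_OF_FILE";
    private static final String ERROR_CODE = "ERROR";

    public static void main(String[] args) throws InterruptedException {
        BlockingQueue<String> writerQueue = new LinkedBlockingQueue<String>();
        boolean error = false;
        try {
            for (int source = 0; source < 3; source++) {
                // Stand-in for reading and buffering one source's events
                writerQueue.put("events-for-source-" + source);
            }
        } catch (RuntimeException ex) {
            error = true;
            throw ex;
        } finally {
            // Exactly one terminal marker reaches the writer, mirroring endEvents(...) above
            writerQueue.put(error ? ERROR_CODE : END_OF_FILE);
        }
        System.out.println(writerQueue);
    }
}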

Aggregations

EventCreationException (com.linkedin.databus2.producers.EventCreationException): 14 usages
SQLException (java.sql.SQLException): 11 usages
UnsupportedKeyException (com.linkedin.databus.core.UnsupportedKeyException): 6 usages
InvalidConfigException (com.linkedin.databus.core.util.InvalidConfigException): 6 usages
DatabusException (com.linkedin.databus2.core.DatabusException): 6 usages
IOException (java.io.IOException): 6 usages
Schema (org.apache.avro.Schema): 6 usages
ArrayList (java.util.ArrayList): 5 usages
OracleTriggerMonitoredSourceInfo (com.linkedin.databus2.producers.db.OracleTriggerMonitoredSourceInfo): 4 usages
SourceType (com.linkedin.databus2.relay.config.ReplicationBitSetterStaticConfig.SourceType): 4 usages
Type (org.apache.avro.Schema.Type): 4 usages
GenericRecord (org.apache.avro.generic.GenericRecord): 4 usages
EventReaderSummary (com.linkedin.databus2.producers.db.EventReaderSummary): 3 usages
NoSuchSchemaException (com.linkedin.databus2.schemas.NoSuchSchemaException): 3 usages
Array (java.sql.Array): 3 usages
ResultSet (java.sql.ResultSet): 3 usages
Field (org.apache.avro.Schema.Field): 3 usages
GenericArray (org.apache.avro.generic.GenericArray): 3 usages
EventProducer (com.linkedin.databus2.producers.EventProducer): 2 usages
PartitionFunction (com.linkedin.databus2.producers.PartitionFunction): 2 usages