Example 11 with DatabusException

Use of com.linkedin.databus2.core.DatabusException in project databus by linkedin.

The class TokensState, method onEndElement:

@Override
public void onEndElement(StateMachine stateMachine, XMLStreamReader xmlStreamReader) throws DatabusException, XMLStreamException {
    _currentStateType = STATETYPE.ENDELEMENT;
    if (LOG.isDebugEnabled())
        LOG.debug("picking scn from token state: " + stateMachine.tokenState.getScn());
    if ((ReplicationBitSetterStaticConfig.SourceType.TOKEN == stateMachine.getReplicationBitConfig().getSourceType())
            && (!stateMachine.tokenState.isSeenReplicationField())
            && (MissingValueBehavior.STOP_WITH_ERROR == stateMachine.getReplicationBitConfig().getMissingValueBehavior()))
        throw new DatabusException("The replication field was not seen in the tokens in the trail files; this field is mandatory! The associated scn is: " + stateMachine.tokenState.getScn());
    setScn(stateMachine.tokenState.getScn());
    if (stateMachine.tokenState.isSeenReplicationField())
        _isReplicated = stateMachine.tokenState.isReplicated();
    else
        _isReplicated = StateMachineHelper.verifyReplicationStatus(stateMachine.getReplicationValuePattern(), null, stateMachine.getReplicationBitConfig().getMissingValueBehavior());
    stateMachine.tokenState.cleanUpState(stateMachine, xmlStreamReader);
    xmlStreamReader.nextTag();
    setNextStateProcessor(stateMachine, xmlStreamReader);
}
Also used: DatabusException (com.linkedin.databus2.core.DatabusException)
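
From the caller's side, the STOP_WITH_ERROR path surfaces as a DatabusException that should abort the parse. A minimal sketch, assuming only the onEndElement signature shown above; the advancePastTokens wrapper is illustrative, not part of databus, and imports for the parser classes (TokensState, StateMachine) are omitted since their packages are not shown on this page.

import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import com.linkedin.databus2.core.DatabusException;

// Illustrative wrapper around the state transition shown above.
void advancePastTokens(TokensState tokensState, StateMachine stateMachine, XMLStreamReader reader) throws XMLStreamException {
    try {
        tokensState.onEndElement(stateMachine, reader);
    } catch (DatabusException e) {
        // STOP_WITH_ERROR made the missing replication field fatal;
        // rethrow so the trail-file parse stops at this SCN.
        throw new XMLStreamException("Mandatory replication field missing: " + e.getMessage(), e);
    }
}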

Example 12 with DatabusException

Use of com.linkedin.databus2.core.DatabusException in project databus by linkedin.

The class TrailFilePositionSetter, method findTxnScn:

private FilePositionResult findTxnScn(ConcurrentAppendableCompositeFileInputStream stream, long expScn, TransactionSCNFinderCallback callback) throws IOException {
    FilePositionResult result = null;
    ScnTxnPos pos = null;
    callback.begin(expScn);
    byte[] bArr = new byte[4 * 1024];
    File prevFile = null;
    File currFile = null;
    long currPosition = -1;
    List<String> lines = new ArrayList<String>();
    List<Integer> lineEndPos = new ArrayList<Integer>();
    String prevLine = null;
    boolean done = false;
    while (!done) {
        prevFile = currFile;
        int numBytes = stream.read(bArr);
        if (numBytes <= 0)
            break;
        currFile = stream.getCurrentFile();
        currPosition = stream.getCurrentPosition();
        boolean spanFile = false;
        int endOffset = 0;
        if ((currFile != null) && (prevFile != null) && (!currFile.equals(prevFile))) {
            // Crossed File boundary while reading this block. Track the endOffset where the file ends
            spanFile = true;
            endOffset = (int) (numBytes - currPosition);
        }
        prevLine = splitBytesByNewLines(bArr, numBytes, spanFile, endOffset, prevLine, lines, lineEndPos);
        // On First Read, call the beginFileProcessing callback
        if (prevFile == null)
            callback.beginFileProcessing(currFile.getName());
        int currOffset = 0;
        for (int i = 0; i < lines.size(); i++) {
            String l = lines.get(i);
            // newLineLen can be one of: (-1) file boundary, (1) "\n" or "\r", (2) "\r\n"
            int newLineLen = lineEndPos.get(i) - currOffset - l.length();
            try {
                done = callback.processLine(l, newLineLen);
            } catch (DatabusException e) {
                _log.error("Got Exception when processing line (" + l + ").", e);
                result = FilePositionResult.createErrorResult(e);
                return result;
            }
            if (done)
                break;
            // a file boundary fell on this line
            if (lineEndPos.get(i) == -1) {
                callback.endFileProcessing(prevFile.getName());
                callback.beginFileProcessing(currFile.getName());
            }
            currOffset = ((lineEndPos.get(i) < 0) ? currOffset + l.length() : lineEndPos.get(i));
        }
        lines.clear();
        lineEndPos.clear();
    }
    // The leftover prevLine may complete one last transaction; process it if the scan did not already finish.
    if (!done && (prevLine != null)) {
        try {
            callback.processLine(prevLine, NO_NEWLINE_LEN);
        } catch (DatabusException e) {
            if (_log.isDebugEnabled())
                _log.debug("Got Exception when processing line (" + prevLine + ").", e);
            result = FilePositionResult.createErrorResult(e);
            return result;
        }
    }
    pos = callback.getTxnPos();
    if (callback.getNumTxnsSeen() <= 0) {
        result = FilePositionResult.createNoTxnsFoundResult();
    } else if ((expScn == USE_LATEST_SCN) || (expScn == USE_EARLIEST_SCN)) {
        // For latest/earliest requests, the position tracked by the callback is the answer.
        result = FilePositionResult.createFoundResult(pos);
    } else {
        // Normal SCN
        if (pos.getMaxScn() == expScn)
            result = FilePositionResult.createFoundResult(pos);
        else
            result = FilePositionResult.createExactScnNotFoundResult(pos);
    }
    return result;
}
Also used: ArrayList (java.util.ArrayList), DatabusException (com.linkedin.databus2.core.DatabusException), File (java.io.File)
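
The DatabusException in this flow comes from the callback: processLine returning true stops the scan, while throwing converts into FilePositionResult.createErrorResult. Below is a sketch of a processLine fragment for a TransactionSCNFinderCallback implementation; the "SCN:" line format and both fields are made-up placeholders, not the actual trail-file syntax.

// Fragment of a hypothetical TransactionSCNFinderCallback implementation.
private long _targetScn;
private long _numTxnsSeen;

public boolean processLine(String line, int newLineLen) throws DatabusException {
    if (!line.startsWith("SCN:"))
        return false; // not a transaction boundary; keep scanning
    long scn;
    try {
        scn = Long.parseLong(line.substring("SCN:".length()).trim());
    } catch (NumberFormatException e) {
        // findTxnScn catches this and returns FilePositionResult.createErrorResult.
        throw new DatabusException("Cannot parse SCN from line: " + line);
    }
    _numTxnsSeen++;
    return scn >= _targetScn; // true == done, stop reading the stream
}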

Example 13 with DatabusException

Use of com.linkedin.databus2.core.DatabusException in project databus by linkedin.

The class BufferInfoRequestProcessor, method processInboundRequest:

private void processInboundRequest(DatabusRequest request, PhysicalPartition pPart) throws IOException, DatabusException {
    DbusEventBuffer evb = _eventBufferMult.getOneBuffer(pPart);
    if (null == evb) {
        LOG.error("BufferInfoRequest : Buffer not available for physical partition :" + pPart);
        throw new BufferNotFoundException("Buffer not available for partition :" + pPart);
    }
    BufferInfoResponse response = new BufferInfoResponse();
    response.setMinScn(evb.getMinScn());
    response.setMaxScn(evb.lastWrittenScn());
    response.setTimestampFirstEvent(evb.getTimestampOfFirstEvent());
    response.setTimestampLatestEvent(evb.getTimestampOfLatestDataEvent());
    writeJsonObjectToResponse(response, request);
}
Also used: BufferNotFoundException (com.linkedin.databus2.core.BufferNotFoundException), DbusEventBuffer (com.linkedin.databus.core.DbusEventBuffer), BufferInfoResponse (com.linkedin.databus.core.BufferInfoResponse)
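
One detail worth noting: processInboundRequest throws BufferNotFoundException while declaring only IOException and DatabusException, so BufferNotFoundException must be a DatabusException subtype, and a single catch covers the missing-buffer case. A sketch of a caller going through the public entry point; processor, request, and LOG are stand-ins, with imports elided for brevity.

try {
    // public entry point; routes to processInboundRequest for inbound categories
    processor.process(request);
} catch (DatabusException e) {
    // also covers BufferNotFoundException for a partition with no buffer
    LOG.error("buffer info request failed", e);
} catch (IOException | RequestProcessingException e) {
    LOG.error("transport or request-processing error", e);
}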

Example 14 with DatabusException

Use of com.linkedin.databus2.core.DatabusException in project databus by linkedin.

The class BufferInfoRequestProcessor, method process:

@Override
public DatabusRequest process(DatabusRequest request) throws IOException, RequestProcessingException, DatabusException {
    String category = request.getParams().getProperty(DatabusRequest.PATH_PARAM_NAME);
    if (null == category) {
        throw new InvalidRequestParamValueException(COMMAND_NAME, "category", "null");
    }
    if (category.startsWith(INBOUND_VIEW)) {
        String sourceIdStr = category.substring(INBOUND_VIEW.length());
        sourceIdStr = sourceIdStr.replace('/', ':');
        PhysicalPartition pPartition = PhysicalPartition.parsePhysicalPartitionString(sourceIdStr, ":");
        processInboundRequest(request, pPartition);
    } else {
        throw new InvalidRequestParamValueException(COMMAND_NAME, "category", category);
    }
    return request;
}
Also used: InvalidRequestParamValueException (com.linkedin.databus2.core.container.request.InvalidRequestParamValueException), PhysicalPartition (com.linkedin.databus.core.data_model.PhysicalPartition)
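
The path-to-partition translation is easiest to see with a concrete value. A sketch, assuming INBOUND_VIEW is "inbound/" purely for illustration; its real value is not shown on this page.

String category = "inbound/exampledb/7";                      // hypothetical request path
String sourceIdStr = category.substring("inbound/".length()); // "exampledb/7"
sourceIdStr = sourceIdStr.replace('/', ':');                  // "exampledb:7"
// parsePhysicalPartitionString splits on ":" into the partition name and id
PhysicalPartition pPartition = PhysicalPartition.parsePhysicalPartitionString(sourceIdStr, ":");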

Example 15 with DatabusException

Use of com.linkedin.databus2.core.DatabusException in project databus by linkedin.

The class TypeInfoFactoryInteractive, method buildTableType:

public TableTypeInfo buildTableType(Connection con, String tableOwner, String tableName, String pk, ConsoleReader reader, HashMap<String, String> dbFieldToAvroDataType) throws SQLException, IOException, DatabusException {
    PreparedStatement stmt = null;
    ResultSet rs = null;
    try {
        String fullTableName = tableOwner + "." + tableName;
        stmt = con.prepareStatement("SELECT * FROM " + fullTableName + " WHERE 0=1");
        rs = stmt.executeQuery();
        ResultSetMetaData rsmd = rs.getMetaData();
        List<FieldInfo> fields = new ArrayList<FieldInfo>();
        int numColumns = rsmd.getColumnCount();
        for (int column = 1; column <= numColumns; column++) {
            String columnName = rsmd.getColumnName(column);
            System.out.println("Processing column " + tableName + "." + columnName + ":" + rsmd.getColumnTypeName(column));
            int columnPrecision = rsmd.getPrecision(column);
            int columnScale = rsmd.getScale(column);
            String columnTypeName;
            String columnTypeOwner;
            String[] columnTypeParts = rsmd.getColumnTypeName(column).split("\\.");
            if (columnTypeParts.length == 1) {
                columnTypeOwner = null;
                columnTypeName = columnTypeParts[0];
            } else {
                columnTypeOwner = columnTypeParts[0];
                columnTypeName = columnTypeParts[1];
            }
            if (columnTypeName.equals("NUMBER")) {
                System.out.println("If you are not sure about the following question, please talk with your DBA or the database owner");
                System.out.println("The following datatypes will be used by the avro generator: ");
                System.out.println("If scale <= 6                                     ===> FLOAT (Irrespective of the precision)");
                System.out.println("If scale <= 17                                    ===> DOUBLE (Irrespective of the precision)");
                System.out.println("If (precision > 9 or precision = 0) and scale = 0 ===> LONG ");
                System.out.println("If precision <= 9 and scale = 0                   ===> INTEGER");
                SimpleTypeInfo typeInfoValidate = new SimpleTypeInfo(columnTypeName, columnPrecision, columnScale);
                if (columnPrecision == 0 && columnScale == 0)
                    System.out.println("Unable to determine the scale and precision for this column, please manually verify the the scale/precision in the oracle table ALL_TAB_COLUMNS");
                System.out.println("The precision [" + columnPrecision + "] and scale [" + columnScale + "] will be used for the field " + columnName + " which has oracle datatype " + columnTypeName + " and the avro datatype " + typeInfoValidate.getPrimitiveType() + " will be used. (yes - to use the printed values, no - to override the datatype with user input): ");
                // If the hashmap is present, the run is CLI-driven: we don't ask for user input and expect the override to be passed through the CLI.
                if (dbFieldToAvroDataType == null) {
                    String line = checkAndRead(reader);
                    while (true) {
                        if (line.equals("yes")) {
                            System.out.println("Using the precision [" + columnPrecision + "] and scale [" + columnScale + "]");
                            break;
                        } else if (line.equals("no")) {
                            System.out.println("Overriding the avro datatype..");
                            System.out.println("Please enter the avro datatype you would like to use [FLOAT,DOUBLE,LONG,INTEGER]: ");
                            String datatype = checkAndRead(reader);
                            try {
                                ScalePrecision scalePrecision = getScaleAndPrecision(datatype);
                                columnPrecision = scalePrecision.getPrecision();
                                columnScale = scalePrecision.getScale();
                            } catch (DatabusException e) {
                                //Invalid input, retry.
                                continue;
                            }
                            typeInfoValidate = new SimpleTypeInfo(columnTypeName, columnPrecision, columnScale);
                            System.out.println("Based on your input, the avro datatype " + typeInfoValidate.getPrimitiveType() + " will be used for the field " + columnName);
                            break;
                        } else {
                            System.out.println("Invalid input, say 'yes' or 'no'");
                            line = checkAndRead(reader);
                        }
                    }
                } else {
                    if (dbFieldToAvroDataType.containsKey(columnName.trim())) {
                        String avroDataType = dbFieldToAvroDataType.get(columnName.trim());
                        ScalePrecision scalePrecision = getScaleAndPrecision(dbFieldToAvroDataType.get(columnName.trim()));
                        System.out.println("Using avro data type [" + avroDataType + "] for the column [" + columnName + "]");
                        columnPrecision = scalePrecision.getPrecision();
                        columnScale = scalePrecision.getScale();
                    } else {
                        System.out.println("The override for the column [" + columnName + "] is not present, this is expected from the user input in cli");
                        throw new DatabusException("Number override not present");
                    }
                }
            }
            TypeInfo typeInfo = getTypeInfo(con, columnTypeOwner, columnTypeName, columnPrecision, columnScale, "", reader, dbFieldToAvroDataType);
            FieldInfo field = new FieldInfo(columnName, typeInfo, column - 1);
            fields.add(field);
        }
        return new TableTypeInfo(tableOwner, tableName, fields, pk);
    } catch (IOException e) {
        System.out.println("Unable to process user input, please try again.");
        e.printStackTrace();
        throw e;
    } finally {
        SchemaUtils.close(rs);
        SchemaUtils.close(stmt);
    }
}
Also used: ArrayList (java.util.ArrayList), PreparedStatement (java.sql.PreparedStatement), IOException (java.io.IOException), ResultSetMetaData (java.sql.ResultSetMetaData), DatabusException (com.linkedin.databus2.core.DatabusException), ResultSet (java.sql.ResultSet)
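
The printed NUMBER-to-Avro rules can be condensed into a small helper. This is a sketch restating the console output above, not databus code; the ordering (scale-0 cases checked before the scale thresholds) and the fallback beyond scale 17 are my reading of those rules.

// Illustrative restatement of the printed NUMBER mapping rules.
static String avroTypeForNumber(int precision, int scale) {
    if (scale == 0)
        return (precision > 9 || precision == 0) ? "LONG" : "INTEGER";
    if (scale <= 6)
        return "FLOAT";  // irrespective of precision
    if (scale <= 17)
        return "DOUBLE"; // irrespective of precision
    return "DOUBLE";     // assumed fallback for larger scales
}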

Aggregations

DatabusException (com.linkedin.databus2.core.DatabusException): 76
Test (org.testng.annotations.Test): 21
ArrayList (java.util.ArrayList): 19
IOException (java.io.IOException): 14
Schema (org.apache.avro.Schema): 14
ConditionCheck (com.linkedin.databus2.test.ConditionCheck): 13
Logger (org.apache.log4j.Logger): 13
InvalidConfigException (com.linkedin.databus.core.util.InvalidConfigException): 12
Channel (org.jboss.netty.channel.Channel): 12
DefaultHttpRequest (org.jboss.netty.handler.codec.http.DefaultHttpRequest): 11
PhysicalPartition (com.linkedin.databus.core.data_model.PhysicalPartition): 10
UnsupportedKeyException (com.linkedin.databus.core.UnsupportedKeyException): 9
VersionedSchema (com.linkedin.databus2.schemas.VersionedSchema): 9
InetSocketAddress (java.net.InetSocketAddress): 9
SocketAddress (java.net.SocketAddress): 9
SQLException (java.sql.SQLException): 9
DefaultHttpResponse (org.jboss.netty.handler.codec.http.DefaultHttpResponse): 9
HttpResponse (org.jboss.netty.handler.codec.http.HttpResponse): 9
EventCreationException (com.linkedin.databus2.producers.EventCreationException): 7
PhysicalSourceStaticConfig (com.linkedin.databus2.relay.config.PhysicalSourceStaticConfig): 7