Search in sources:

Example 1 with DatabusException

Use of com.linkedin.databus2.core.DatabusException in project databus by linkedin.

The class ORListener, method generateKeyPair.

private List<KeyPair> generateKeyPair(List<Column> cl, Schema schema) throws DatabusException {
    Object o = null;
    Schema.Type st = null;
    // Build PrimaryKeySchema
    String pkFieldName = SchemaHelper.getMetaField(schema, "pk");
    if (pkFieldName == null) {
        throw new DatabusException("No primary key specified in the schema");
    }
    PrimaryKeySchema pkSchema = new PrimaryKeySchema(pkFieldName);
    List<Schema.Field> fields = schema.getFields();
    List<KeyPair> kpl = new ArrayList<KeyPair>();
    int cnt = 0;
    // OR delivers column values in schema-field order, so pair them with fields positionally
    for (Schema.Field field : fields) {
        if (pkSchema.isPartOfPrimaryKey(field)) {
            o = cl.get(cnt).getValue();
            st = field.schema().getType();
            KeyPair kp = new KeyPair(o, st);
            kpl.add(kp);
        }
        cnt++;
    }
    return kpl;
}
Also used: KeyPair(com.linkedin.databus2.producers.ds.KeyPair) Schema(org.apache.avro.Schema) PrimaryKeySchema(com.linkedin.databus2.producers.ds.PrimaryKeySchema) VersionedSchema(com.linkedin.databus2.schemas.VersionedSchema) ArrayList(java.util.ArrayList) Field(org.apache.avro.Schema.Field) DatabusException(com.linkedin.databus2.core.DatabusException)
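The key-extraction loop above relies on the OR columns arriving in the same order as the Avro schema fields, so the primary-key values can be picked out positionally; PrimaryKeySchema.isPartOfPrimaryKey, driven by the schema's "pk" meta field, decides which positions belong to the key. A minimal, self-contained sketch of that positional pairing, using plain Java with hypothetical field names instead of the databus classes:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class PositionalKeyExtraction {
    public static void main(String[] args) {
        // Hypothetical row: column values arrive in schema-field order
        List<Object> columnValues = Arrays.<Object>asList(42L, "alice", "2016-01-01");
        List<String> fieldNames = Arrays.asList("memberId", "name", "createdDate");
        // Key fields, as a "pk" meta attribute might declare them
        Set<String> pkFields = new HashSet<String>(Arrays.asList("memberId"));

        List<Object> keyValues = new ArrayList<Object>();
        for (int i = 0; i < fieldNames.size(); i++) {
            // Pick the column value sitting at the same position as the key field
            if (pkFields.contains(fieldNames.get(i))) {
                keyValues.add(columnValues.get(i));
            }
        }
        System.out.println(keyValues); // prints [42]
    }
}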

Example 2 with DatabusException

Use of com.linkedin.databus2.core.DatabusException in project databus by linkedin.

The class ORListener, method orToAvroType.

/**
   * Given an OR Column, returns the corresponding Java object that can be inserted into
   * an Avro record.
   * @param avroField the Avro field whose metadata (e.g. dbFieldType) guides the conversion
   */
private Object orToAvroType(Column s, Field avroField) throws DatabusException {
    if (s instanceof BitColumn) {
        // Bit values are wrapped as raw bytes in a ByteBuffer (byte order as provided by OR)
        BitColumn bc = (BitColumn) s;
        byte[] ba = bc.getValue();
        ByteBuffer b = ByteBuffer.wrap(ba);
        return b;
    } else if (s instanceof StringColumn) {
        StringColumn sc = (StringColumn) s;
        String str = new String(sc.getValue(), StringUtils.DEFAULT_CHARSET);
        return str;
    } else if (s instanceof BlobColumn) {
        BlobColumn bc = (BlobColumn) s;
        byte[] ba = bc.getValue();
        // Distinguish between BLOBs and CLOBs: try to decode as text, fall back to raw bytes on failure
        try {
            return new String(ba, StringUtils.DEFAULT_CHARSET);
        } catch (Exception e) {
            return ByteBuffer.wrap(ba);
        }
    } else if (s instanceof DateColumn) {
        DateColumn dc = (DateColumn) s;
        Date d = dc.getValue();
        Long l = d.getTime();
        return l;
    } else if (s instanceof DatetimeColumn) {
        DatetimeColumn dc = (DatetimeColumn) s;
        Date d = dc.getValue();
        // Bug in OR for the DateTime and Time data types: milliseconds are not available for these
        // columns but are wrongly set from currentMillis(), so truncate to second granularity
        Long t1 = (d.getTime() / 1000) * 1000;
        return t1;
    } else if (s instanceof DecimalColumn) {
        DecimalColumn dc = (DecimalColumn) s;
        Object val = Double.valueOf(dc.getValue().doubleValue());
        return val;
    } else if (s instanceof DoubleColumn) {
        DoubleColumn dc = (DoubleColumn) s;
        Double d = dc.getValue();
        return d;
    } else if (s instanceof EnumColumn) {
        EnumColumn ec = (EnumColumn) s;
        Integer i = ec.getValue();
        return i;
    } else if (s instanceof FloatColumn) {
        FloatColumn fc = (FloatColumn) s;
        Float f = fc.getValue();
        return f;
    } else if (s instanceof Int24Column) {
        Int24Column ic = (Int24Column) s;
        Integer i = ic.getValue();
        if (i < 0 && SchemaHelper.getMetaField(avroField, "dbFieldType").contains("UNSIGNED")) {
            i += ORListener.MEDIUMINT_MAX_VALUE;
        }
        return i;
    } else if (s instanceof LongColumn) {
        LongColumn lc = (LongColumn) s;
        Long l = lc.getValue().longValue();
        if (l < 0 && SchemaHelper.getMetaField(avroField, "dbFieldType").contains("UNSIGNED")) {
            l += ORListener.INTEGER_MAX_VALUE;
        }
        return l;
    } else if (s instanceof LongLongColumn) {
        LongLongColumn llc = (LongLongColumn) s;
        BigInteger b = new BigInteger(llc.getValue() + "");
        if (b.compareTo(BigInteger.ZERO) < 0 && SchemaHelper.getMetaField(avroField, "dbFieldType").contains("UNSIGNED")) {
            b = b.add(ORListener.BIGINT_MAX_VALUE);
        }
        return b;
    } else if (s instanceof NullColumn) {
        return null;
    } else if (s instanceof SetColumn) {
        SetColumn sc = (SetColumn) s;
        Long l = sc.getValue();
        return l;
    } else if (s instanceof ShortColumn) {
        ShortColumn sc = (ShortColumn) s;
        Integer i = sc.getValue();
        if (i < 0 && SchemaHelper.getMetaField(avroField, "dbFieldType").contains("UNSIGNED")) {
            i = i + ORListener.SMALLINT_MAX_VALUE;
        }
        return i;
    } else if (s instanceof TimeColumn) {
        TimeColumn tc = (TimeColumn) s;
        Time t = tc.getValue();
        /**
         * There is a bug in OR where, instead of using 1970 as the default year, it uses 0070.
         * As a temporary workaround at this layer, "0070-01-01 00:00:00" is subtracted from the
         * value obtained from OR.
         */
        Calendar c = Calendar.getInstance();
        // year 0070, January 1, 00:00:00 (the buggy OR epoch)
        c.set(70, 0, 1, 0, 0, 0);
        // Round off the milliseconds: TimeColumn has only second granularity, but the Calendar
        // instance carries the milliseconds of System.currentTimeMillis() from instantiation
        long rawVal = (c.getTimeInMillis() / 1000) * 1000;
        long val2 = (t.getTime() / 1000) * 1000;
        long offset = val2 - rawVal;
        return offset;
    } else if (s instanceof TimestampColumn) {
        TimestampColumn tsc = (TimestampColumn) s;
        Timestamp ts = tsc.getValue();
        Long t = ts.getTime();
        return t;
    } else if (s instanceof DatetimeColumn) {
        // Note: unreachable in practice; DatetimeColumn is already handled by the earlier branch above
        DatetimeColumn tsc = (DatetimeColumn) s;
        Long t = tsc.getValue().getTime();
        return t;
    } else if (s instanceof Datetime2Column) {
        Datetime2Column tsc = (Datetime2Column) s;
        Long t = tsc.getValue().getTime();
        return t;
    } else if (s instanceof TinyColumn) {
        TinyColumn tc = (TinyColumn) s;
        Integer i = tc.getValue();
        if (i < 0 && SchemaHelper.getMetaField(avroField, "dbFieldType").contains("UNSIGNED")) {
            i = i + ORListener.TINYINT_MAX_VALUE;
        }
        return i;
    } else if (s instanceof YearColumn) {
        YearColumn yc = (YearColumn) s;
        Integer i = yc.getValue();
        return i;
    } else {
        throw new DatabusRuntimeException("Unknown MySQL type in the event" + s.getClass() + " Object = " + s);
    }
}
Also used: BlobColumn(com.google.code.or.common.glossary.column.BlobColumn) SetColumn(com.google.code.or.common.glossary.column.SetColumn) DecimalColumn(com.google.code.or.common.glossary.column.DecimalColumn) Time(java.sql.Time) FloatColumn(com.google.code.or.common.glossary.column.FloatColumn) Timestamp(java.sql.Timestamp) EnumColumn(com.google.code.or.common.glossary.column.EnumColumn) LongColumn(com.google.code.or.common.glossary.column.LongColumn) LongLongColumn(com.google.code.or.common.glossary.column.LongLongColumn) BitColumn(com.google.code.or.common.glossary.column.BitColumn) YearColumn(com.google.code.or.common.glossary.column.YearColumn) TimeColumn(com.google.code.or.common.glossary.column.TimeColumn) StringColumn(com.google.code.or.common.glossary.column.StringColumn) Calendar(java.util.Calendar) TimestampColumn(com.google.code.or.common.glossary.column.TimestampColumn) DatetimeColumn(com.google.code.or.common.glossary.column.DatetimeColumn) Datetime2Column(com.google.code.or.common.glossary.column.Datetime2Column) ByteBuffer(java.nio.ByteBuffer) Int24Column(com.google.code.or.common.glossary.column.Int24Column) NoSuchSchemaException(com.linkedin.databus2.schemas.NoSuchSchemaException) DatabusException(com.linkedin.databus2.core.DatabusException) DatabusRuntimeException(com.linkedin.databus.core.DatabusRuntimeException) Date(java.util.Date) BigInteger(java.math.BigInteger) DoubleColumn(com.google.code.or.common.glossary.column.DoubleColumn) DateColumn(com.google.code.or.common.glossary.column.DateColumn) ShortColumn(com.google.code.or.common.glossary.column.ShortColumn) NullColumn(com.google.code.or.common.glossary.column.NullColumn) TinyColumn(com.google.code.or.common.glossary.column.TinyColumn)
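Several of the integer branches above (Int24Column, LongColumn, LongLongColumn, ShortColumn, TinyColumn) compensate for MySQL UNSIGNED columns: OR hands back signed Java values, and when a value is negative the full range of the column type is added to recover the unsigned value. A small, self-contained illustration of that adjustment for SMALLINT; the range constant here (2^16) is the standard MySQL type range and only an assumption about what the ORListener constants hold:

public class UnsignedAdjustmentSketch {
    // Full range of MySQL SMALLINT (2^16); illustrative, not the actual ORListener.SMALLINT_MAX_VALUE
    private static final int SMALLINT_RANGE = 1 << 16;

    // Re-interprets a signed 16-bit value as MySQL SMALLINT UNSIGNED
    static int toUnsignedSmallint(short signed) {
        int v = signed;
        if (v < 0) {
            v += SMALLINT_RANGE;
        }
        return v;
    }

    public static void main(String[] args) {
        System.out.println(toUnsignedSmallint((short) -1));     // 65535 (0xFFFF)
        System.out.println(toUnsignedSmallint((short) -32768)); // 32768 (0x8000)
    }
}

The same idea scales to the other types: 2^24 for MEDIUMINT, 2^32 for INT, and 2^64 for BIGINT, which is why the LongLongColumn branch above switches to BigInteger.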

Example 3 with DatabusException

Use of com.linkedin.databus2.core.DatabusException in project databus by linkedin.

The class JmxShutdownThread, method initializeContainerCommandProcessors.

protected void initializeContainerCommandProcessors() throws DatabusException {
    _processorRegistry.register(ContainerStatsRequestProcessor.COMMAND_NAME, new ContainerStatsRequestProcessor(null, this));
    _processorRegistry.register(JavaStatsRequestProcessor.COMMAND_NAME, new JavaStatsRequestProcessor(null));
    String healthcheckPrefix = ContainerAdminRequestProcessor.extractCommandRoot(_containerStaticConfig.getHealthcheckPath());
    LOG.info("healthcheck command root: " + healthcheckPrefix);
    _processorRegistry.register(healthcheckPrefix, new ContainerAdminRequestProcessor(null, _componentStatus, _containerStaticConfig.getHealthcheckPath()));
}
Also used: ContainerStatsRequestProcessor(com.linkedin.databus2.core.container.request.ContainerStatsRequestProcessor) JavaStatsRequestProcessor(com.linkedin.databus2.core.container.request.JavaStatsRequestProcessor) ContainerAdminRequestProcessor(com.linkedin.databus2.core.container.request.ContainerAdminRequestProcessor)
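The three register calls above follow a plain command-name-to-processor lookup. A stripped-down sketch of such a registry, using a hypothetical Processor interface rather than the actual databus RequestProcessor and registry API:

import java.util.HashMap;
import java.util.Map;

public class ProcessorRegistrySketch {
    // Hypothetical processor contract; the real databus interface differs
    interface Processor {
        String process(String request);
    }

    private final Map<String, Processor> processors = new HashMap<String, Processor>();

    void register(String commandName, Processor processor) {
        processors.put(commandName, processor);
    }

    String dispatch(String commandName, String request) {
        Processor p = processors.get(commandName);
        if (p == null) {
            throw new IllegalArgumentException("No processor registered for: " + commandName);
        }
        return p.process(request);
    }

    public static void main(String[] args) {
        ProcessorRegistrySketch registry = new ProcessorRegistrySketch();
        registry.register("containerStats", new Processor() {
            public String process(String request) {
                return "{\"uptimeMs\": 12345}";
            }
        });
        System.out.println(registry.dispatch("containerStats", "/containerStats"));
    }
}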

Example 4 with DatabusException

Use of com.linkedin.databus2.core.DatabusException in project databus by linkedin.

The class SimpleBinaryDatabusRequestDecoder, method decode.

@Override
protected Object decode(ChannelHandlerContext ctx, Channel channel, ChannelBuffer buffer) throws Exception {
    int resetIndex = buffer.readerIndex();
    Object result = null;
    while (State.INCOMPLETE_DATA != _state && null == result && buffer.readable()) {
        switch(_state) {
            case EXPECT_COMMAND:
                {
                    byte opcode = buffer.readByte();
                    // A new or changed opcode (or none seen yet) requires creating a new command parser
                    if (opcode != _curOpcode || -1 == _curOpcode) {
                        _currentParser = _commandsRegistry.createParser(opcode, ctx.getChannel(), _byteOrder);
                        if (null == _currentParser) {
                            _curOpcode = -1;
                            returnError(ctx.getChannel(), ErrorResponse.createUnknownCommandResponse(opcode), buffer, State.EXPECT_COMMAND, true, true);
                        } else {
                            _curOpcode = opcode;
                            _state = State.EXPECT_MORE_DATA;
                            ChannelPipeline pipe = ctx.getPipeline();
                            pipe.replace(REQUEST_EXEC_HANDLER_NAME, REQUEST_EXEC_HANDLER_NAME, _commandsRegistry.createExecHandler(opcode, ctx.getChannel()));
                            _currentParser.startNew();
                            resetIndex = buffer.readerIndex();
                        }
                    } else {
                        _state = State.EXPECT_MORE_DATA;
                        _currentParser.startNew();
                        resetIndex = buffer.readerIndex();
                    }
                    break;
                }
            case EXPECT_MORE_DATA:
                {
                    if (null == _currentParser) {
                        returnError(ctx.getChannel(), ErrorResponse.createInternalServerErrorResponse(new DatabusException("expecting more data but no parser")), buffer, State.EXPECT_COMMAND, true, true);
                    } else {
                        try {
                            BinaryCommandParser.ParseResult parseResult = _currentParser.parseBinary(buffer);
                            switch(parseResult) {
                                case DISCARD:
                                    {
                                        /* do nothing */
                                        break;
                                    }
                                case INCOMPLETE_DATA:
                                    {
                                        _state = State.INCOMPLETE_DATA;
                                        break;
                                    }
                                case EXPECT_MORE:
                                    {
                                        _state = State.EXPECT_MORE_DATA;
                                        break;
                                    }
                                case PASS_THROUGH:
                                    {
                                        result = buffer;
                                        break;
                                    }
                                case DONE:
                                    {
                                        if (null == _currentParser.getError()) {
                                            result = _currentParser.getCommand();
                                            _state = State.EXPECT_COMMAND;
                                        } else {
                                            returnError(ctx.getChannel(), _currentParser.getError(), buffer, State.EXPECT_COMMAND, true, true);
                                        }
                                        break;
                                    }
                                default:
                                    {
                                        returnError(ctx.getChannel(), ErrorResponse.createInternalServerErrorResponse(new DatabusException("unknown parser return code" + parseResult)), buffer, State.EXPECT_COMMAND, true, true);
                                    }
                            }
                        } catch (UnsupportedProtocolVersionException upve) {
                            returnError(ctx.getChannel(), ErrorResponse.createUnsupportedProtocolVersionResponse(upve.getProtocolVerson()), buffer, State.EXPECT_COMMAND, true, true);
                        } catch (Exception ex) {
                            returnError(ctx.getChannel(), ErrorResponse.createInternalServerErrorResponse(ex), buffer, State.EXPECT_COMMAND, true, true);
                        }
                    }
                    break;
                }
            default:
                {
                    returnError(ctx.getChannel(), ErrorResponse.createInternalServerErrorResponse(new DatabusException("unknown state: " + _state)), buffer, State.EXPECT_COMMAND, true, true);
                }
        }
    }
    if (State.INCOMPLETE_DATA == _state) {
        buffer.readerIndex(resetIndex);
        result = null;
        _state = State.EXPECT_MORE_DATA;
    }
    if (State.EXPECT_COMMAND == _state) {
        if (null != _readTimeoutHandler && _readTimeoutHandler.isStarted())
            _readTimeoutHandler.stop();
    } else {
        if (null != _readTimeoutHandler && !_readTimeoutHandler.isStarted())
            _readTimeoutHandler.start(ctx);
    }
    return result;
}
Also used: DatabusException(com.linkedin.databus2.core.DatabusException) ChannelPipeline(org.jboss.netty.channel.ChannelPipeline)
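The decoder above is built around a replay-on-incomplete-data pattern: it remembers the reader index before each parse attempt, and when the parser reports INCOMPLETE_DATA it rewinds the buffer and returns null so decode is called again once more bytes arrive. A minimal standalone sketch of that idea, using java.nio.ByteBuffer mark/reset and a made-up one-byte length-prefixed frame instead of the databus binary protocol:

import java.nio.ByteBuffer;

public class ResetAndRetryDecodeSketch {
    // Tries to decode one length-prefixed frame (1-byte length + payload);
    // returns the payload, or null if not enough bytes have arrived yet
    static byte[] tryDecode(ByteBuffer buffer) {
        buffer.mark(); // remember where this attempt started
        if (buffer.remaining() < 1) {
            return null;
        }
        int length = buffer.get() & 0xFF;
        if (buffer.remaining() < length) {
            buffer.reset(); // incomplete data: rewind so the next attempt re-reads the length
            return null;
        }
        byte[] payload = new byte[length];
        buffer.get(payload);
        return payload;
    }

    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.allocate(16);
        buf.put((byte) 3).put((byte) 'a');   // only part of the frame has arrived
        buf.flip();
        System.out.println(tryDecode(buf));  // null: wait for more data
        buf.compact();
        buf.put((byte) 'b').put((byte) 'c'); // rest of the frame arrives
        buf.flip();
        System.out.println(new String(tryDecode(buf))); // prints abc
    }
}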

Example 5 with DatabusException

Use of com.linkedin.databus2.core.DatabusException in project databus by linkedin.

The class TestDatabusRelayEvents, method testV2Events.

/**
 * Stuffs an event buffer with both a v1 and a v2 event, then reads the buffer two ways:
 * first accepting only v1 events (verifying conversion of the v2 event to v1); then accepting
 * both v1 and v2 events.
 *
 * Note that the version of the _EOP_ events must match the version of the event factory,
 * regardless of the versions of any preceding "real" events.  (This matches DbusEventBuffer
 * behavior; see the serializeLongKeyEndOfPeriodMarker() call in endEvents() for details.)
 */
@Test
public void testV2Events() throws KeyTypeNotImplementedException, InvalidEventException, IOException, DatabusException {
    final Logger log = Logger.getLogger("TestDatabusRelayEvents.testV2Events");
    log.setLevel(Level.DEBUG);
    String[] srcs = { "com.linkedin.events.example.fake.FakeSchema" };
    String pSourceName = DatabusRelayTestUtil.getPhysicalSrcName(srcs[0]);
    short srcId = 2;
    short pId = 1;
    int relayPort = Utils.getAvailablePort(11993);
    // create relay
    final DatabusRelayMain relay1 = createRelay(relayPort, pId, srcs);
    DatabusRelayTestUtil.RelayRunner r1 = null;
    ClientRunner cr = null;
    try {
        // EventProducer[] producers = relay1.getProducers();
        r1 = new DatabusRelayTestUtil.RelayRunner(relay1);
        log.info("Relay created");
        DbusEventBufferMult bufMult = relay1.getEventBuffer();
        PhysicalPartition pPartition = new PhysicalPartition((int) pId, pSourceName);
        DbusEventBuffer buf = (DbusEventBuffer) bufMult.getDbusEventBufferAppendable(pPartition);
        log.info("create some events");
        long windowScn = 100L;
        ByteBuffer serializationBuffer = addEvent(windowScn, srcId, relay1.getSchemaRegistryService().fetchSchemaIdForSourceNameAndVersion(srcs[0], 2).getByteArray(), pId, DbusEventFactory.DBUS_EVENT_V2);
        ReadableByteChannel channel = Channels.newChannel(new ByteBufferInputStream(serializationBuffer));
        int readEvents = buf.readEvents(channel);
        log.info("successfully read in " + readEvents + " events ");
        channel.close();
        windowScn = 101L;
        serializationBuffer = addEvent(windowScn, srcId, relay1.getSchemaRegistryService().fetchSchemaIdForSourceNameAndVersion(srcs[0], 2).getByteArray(), pId, DbusEventFactory.DBUS_EVENT_V1);
        channel = Channels.newChannel(new ByteBufferInputStream(serializationBuffer));
        readEvents = buf.readEvents(channel);
        log.info("successfully read in " + readEvents + " events ");
        channel.close();
        log.info("starting relay on port " + relayPort);
        r1.start();
        // TestUtil.sleep(10*1000);
        // wait until relay comes up
        TestUtil.assertWithBackoff(new ConditionCheck() {

            @Override
            public boolean check() {
                return relay1.isRunningStatus();
            }
        }, "Relay hasn't come up completely ", 30000, LOG);
        log.info("now create client");
        String srcSubscriptionString = TestUtil.join(srcs, ",");
        String serverName = "localhost:" + relayPort;
        final EventsCountingConsumer countingConsumer = new EventsCountingConsumer();
        int id = (RngUtils.randomPositiveInt() % 10000) + 1;
        DatabusSourcesConnection clientConn = RelayEventProducer.createDatabusSourcesConnection("testProducer", id, serverName, srcSubscriptionString, countingConsumer, 1 * 1024 * 1024, 50000, 30 * 1000, 100, 15 * 1000, 1, true, DatabusClientNettyThreadPools.createNettyThreadPools(id), 0, DbusEventFactory.DBUS_EVENT_V1, 0);
        cr = new ClientRunner(clientConn);
        log.info("starting client");
        cr.start();
        // wait till client gets the event
        TestUtil.assertWithBackoff(new ConditionCheck() {

            @Override
            public boolean check() {
                int events = countingConsumer.getNumDataEvents();
                LOG.info("client got " + events + " events");
                return events == 2;
            }
        }, "Consumer didn't get 2 events ", 64 * 1024, LOG);
        // asserts
        Assert.assertEquals(countingConsumer.getNumDataEvents(), 2);
        Assert.assertEquals(countingConsumer.getNumWindows(), 2);
        Assert.assertEquals(countingConsumer.getNumDataEvents(DbusEventFactory.DBUS_EVENT_V1), 2);
        log.info("shutdown first client");
        clientConn.stop();
        cr.shutdown();
        TestUtil.sleep(1000);
        cr = null;
        log.info("start another client who understands V2");
        final EventsCountingConsumer countingConsumer1 = new EventsCountingConsumer();
        clientConn = RelayEventProducer.createDatabusSourcesConnection("testProducer", id, serverName, srcSubscriptionString, countingConsumer1, 1 * 1024 * 1024, 50000, 30 * 1000, 100, 15 * 1000, 1, true, DatabusClientNettyThreadPools.createNettyThreadPools(id), 0, DbusEventFactory.DBUS_EVENT_V2, 0);
        cr = new ClientRunner(clientConn);
        cr.start();
        log.info("wait till client gets the event");
        TestUtil.assertWithBackoff(new ConditionCheck() {

            @Override
            public boolean check() {
                int events = countingConsumer1.getNumDataEvents();
                LOG.debug("client got " + events + " events");
                return events == 2;
            }
        }, "Consumer didn't get 2 events ", 64 * 1024, LOG);
        // asserts
        Assert.assertEquals(countingConsumer1.getNumDataEvents(), 2);
        Assert.assertEquals(countingConsumer1.getNumWindows(), 2);
        Assert.assertEquals(countingConsumer1.getNumDataEvents(DbusEventFactory.DBUS_EVENT_V1), 1);
        Assert.assertEquals(countingConsumer1.getNumDataEvents(DbusEventFactory.DBUS_EVENT_V2), 1);
    } finally {
        cleanup(new DatabusRelayTestUtil.RelayRunner[] { r1 }, cr);
    }
}
Also used: ConditionCheck(com.linkedin.databus2.test.ConditionCheck) ReadableByteChannel(java.nio.channels.ReadableByteChannel) ClientRunner(com.linkedin.databus2.relay.TestDatabusRelayMain.ClientRunner) ByteBufferInputStream(org.apache.zookeeper.server.ByteBufferInputStream) Logger(org.apache.log4j.Logger) ByteBuffer(java.nio.ByteBuffer) DbusEventBuffer(com.linkedin.databus.core.DbusEventBuffer) DatabusSourcesConnection(com.linkedin.databus.client.DatabusSourcesConnection) DatabusRelayTestUtil(com.linkedin.databus2.relay.util.test.DatabusRelayTestUtil) DbusEventBufferMult(com.linkedin.databus.core.DbusEventBufferMult) PhysicalPartition(com.linkedin.databus.core.data_model.PhysicalPartition) Test(org.testng.annotations.Test)
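The test above drives its timing with TestUtil.assertWithBackoff and ConditionCheck callbacks, polling until the relay is up and each consumer has seen the expected events. The same poll-until-true idea can be written as a small standalone helper; the names and backoff policy below are assumptions for illustration, not the databus TestUtil implementation:

import java.util.concurrent.Callable;
import java.util.concurrent.atomic.AtomicInteger;

public class PollUntilTrueSketch {
    // Polls the condition until it returns true or the timeout expires
    static void assertWithBackoff(Callable<Boolean> condition, String message, long timeoutMs)
            throws Exception {
        long deadline = System.currentTimeMillis() + timeoutMs;
        long sleepMs = 10;
        while (System.currentTimeMillis() < deadline) {
            if (condition.call()) {
                return;
            }
            Thread.sleep(sleepMs);
            sleepMs = Math.min(sleepMs * 2, 1000); // exponential backoff, capped at one second
        }
        throw new AssertionError(message);
    }

    public static void main(String[] args) throws Exception {
        final AtomicInteger eventsSeen = new AtomicInteger(0);
        // Simulate a consumer that observes two events a little later
        new Thread(new Runnable() {
            public void run() {
                try {
                    Thread.sleep(200);
                    eventsSeen.set(2);
                } catch (InterruptedException ignored) {
                }
            }
        }).start();
        assertWithBackoff(new Callable<Boolean>() {
            public Boolean call() {
                return eventsSeen.get() == 2;
            }
        }, "Consumer didn't get 2 events", 5000);
        System.out.println("got " + eventsSeen.get() + " events");
    }
}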

Aggregations

DatabusException (com.linkedin.databus2.core.DatabusException): 78
Test (org.testng.annotations.Test): 21
ArrayList (java.util.ArrayList): 20
Schema (org.apache.avro.Schema): 16
IOException (java.io.IOException): 14
ConditionCheck (com.linkedin.databus2.test.ConditionCheck): 13
Logger (org.apache.log4j.Logger): 13
InvalidConfigException (com.linkedin.databus.core.util.InvalidConfigException): 12
Channel (org.jboss.netty.channel.Channel): 12
VersionedSchema (com.linkedin.databus2.schemas.VersionedSchema): 11
DefaultHttpRequest (org.jboss.netty.handler.codec.http.DefaultHttpRequest): 11
PhysicalPartition (com.linkedin.databus.core.data_model.PhysicalPartition): 10
UnsupportedKeyException (com.linkedin.databus.core.UnsupportedKeyException): 9
InetSocketAddress (java.net.InetSocketAddress): 9
SocketAddress (java.net.SocketAddress): 9
SQLException (java.sql.SQLException): 9
DefaultHttpResponse (org.jboss.netty.handler.codec.http.DefaultHttpResponse): 9
HttpResponse (org.jboss.netty.handler.codec.http.HttpResponse): 9
EventCreationException (com.linkedin.databus2.producers.EventCreationException): 7
PhysicalSourceStaticConfig (com.linkedin.databus2.relay.config.PhysicalSourceStaticConfig): 7