Example 6 with Field

Use of io.openmessaging.connector.api.data.Field in project rocketmq-externals by apache.

The class MetaSourceTask, method poll:

@Override
public Collection<SourceDataEntry> poll() {
    log.debug("polling...");
    List<String> groups = JSONObject.parseArray(this.config.getTaskGroupList(), String.class);
    if (groups == null) {
        log.info("no group in task.");
        try {
            Thread.sleep(TimeUnit.SECONDS.toMillis(10));
        } catch (InterruptedException e) {
            throw new IllegalStateException(e);
        }
        return Collections.emptyList();
    }
    List<SourceDataEntry> res = new ArrayList<>();
    for (String group : groups) {
        ConsumeStats stats;
        try {
            stats = this.srcMQAdminExt.examineConsumeStats(group);
        } catch (Exception e) {
            log.error("admin get consumer info failed for consumer groups: " + group, e);
            continue;
        }
        for (Map.Entry<MessageQueue, OffsetWrapper> offsetTable : stats.getOffsetTable().entrySet()) {
            MessageQueue mq = offsetTable.getKey();
            long srcOffset = offsetTable.getValue().getConsumerOffset();
            long targetOffset = this.store.convertTargetOffset(mq, group, srcOffset);
            JSONObject jsonObject = new JSONObject();
            jsonObject.put(RmqConstants.NEXT_POSITION, srcOffset);
            Schema schema = new Schema();
            schema.setDataSource(this.config.getSourceRocketmq());
            schema.setName(mq.getTopic());
            schema.setFields(new ArrayList<>());
            schema.getFields().add(new Field(0, FieldName.OFFSET.getKey(), FieldType.INT64));
            DataEntryBuilder dataEntryBuilder = new DataEntryBuilder(schema);
            dataEntryBuilder.timestamp(System.currentTimeMillis()).queue(this.config.getStoreTopic()).entryType(EntryType.UPDATE);
            // putFiled is the connector API's method name (used consistently across these examples)
            dataEntryBuilder.putFiled(FieldName.OFFSET.getKey(), targetOffset);
            // partition buffer identifies the source queue (topic/broker/queueId); position buffer carries the JSON offset record built above
            SourceDataEntry sourceDataEntry = dataEntryBuilder.buildSourceDataEntry(
                ByteBuffer.wrap(RmqConstants.getPartition(mq.getTopic(), mq.getBrokerName(), String.valueOf(mq.getQueueId()))
                    .getBytes(StandardCharsets.UTF_8)),
                ByteBuffer.wrap(jsonObject.toJSONString().getBytes(StandardCharsets.UTF_8)));
            String targetTopic = group + "-" + mq.getTopic() + "-" + mq.getQueueId();
            sourceDataEntry.setQueueName(targetTopic);
            res.add(sourceDataEntry);
        }
    }
    return res;
}
Also used : SourceDataEntry(io.openmessaging.connector.api.data.SourceDataEntry) ConsumeStats(org.apache.rocketmq.common.admin.ConsumeStats) Schema(io.openmessaging.connector.api.data.Schema) ArrayList(java.util.ArrayList) DataEntryBuilder(io.openmessaging.connector.api.data.DataEntryBuilder) MQClientException(org.apache.rocketmq.client.exception.MQClientException) OffsetWrapper(org.apache.rocketmq.common.admin.OffsetWrapper) Field(io.openmessaging.connector.api.data.Field) MessageQueue(org.apache.rocketmq.common.message.MessageQueue) JSONObject(com.alibaba.fastjson.JSONObject) Map(java.util.Map)
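
The pattern above (a one-field Schema plus a DataEntryBuilder) can be exercised on its own. Below is a minimal, hypothetical sketch of that pattern, not code from MetaSourceTask: the helper name buildOffsetEntry, the "offset" field name, and the placeholder partition/queue strings are illustrative assumptions; only API calls already visible in the example are used.

// Hypothetical standalone sketch of the pattern in MetaSourceTask#poll: a one-field INT64 schema
// carried by a SourceDataEntry. Quoted names are placeholders, not values from the connector.
private SourceDataEntry buildOffsetEntry(String topic, long srcOffset, long targetOffset) {
    JSONObject position = new JSONObject();
    position.put("nextPosition", srcOffset);                // MetaSourceTask uses RmqConstants.NEXT_POSITION here
    Schema schema = new Schema();
    schema.setDataSource("source-cluster");                 // placeholder data source name
    schema.setName(topic);
    schema.setFields(new ArrayList<>());
    schema.getFields().add(new Field(0, "offset", FieldType.INT64));
    DataEntryBuilder builder = new DataEntryBuilder(schema)
        .timestamp(System.currentTimeMillis())
        .queue("store-topic")                               // placeholder target queue
        .entryType(EntryType.UPDATE);
    builder.putFiled("offset", targetOffset);               // putFiled is the connector API's method name
    return builder.buildSourceDataEntry(
        ByteBuffer.wrap("partition-key".getBytes(StandardCharsets.UTF_8)),
        ByteBuffer.wrap(position.toJSONString().getBytes(StandardCharsets.UTF_8)));
}

The position buffer is what the runtime hands back on restart, as the positionStorageReader() read-back in Example 7 suggests.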

Example 7 with Field

Use of io.openmessaging.connector.api.data.Field in project rocketmq-externals by apache.

The class FileSourceTask, method poll:

@Override
public Collection<SourceDataEntry> poll() {
    log.info("Start a poll stream is null:{}", stream == null);
    if (stream == null) {
        try {
            stream = Files.newInputStream(Paths.get(fileConfig.getFilename()));
            ByteBuffer positionInfo;
            positionInfo = this.context.positionStorageReader().getPosition(ByteBuffer.wrap(FileConstants.getPartition(fileConfig.getFilename()).getBytes(Charset.defaultCharset())));
            if (positionInfo != null) {
                log.info("positionInfo is not null!");
                String positionJson = new String(positionInfo.array(), Charset.defaultCharset());
                JSONObject jsonObject = JSONObject.parseObject(positionJson);
                Object lastRecordedOffset = jsonObject.getLong(FileConstants.NEXT_POSITION);
                if (lastRecordedOffset != null && !(lastRecordedOffset instanceof Long))
                    throw new ConnectException(-1, "Offset position is the incorrect type");
                if (lastRecordedOffset != null) {
                    log.debug("Found previous offset, trying to skip to file offset {}", lastRecordedOffset);
                    long skipLeft = (Long) lastRecordedOffset;
                    while (skipLeft > 0) {
                        try {
                            long skipped = stream.skip(skipLeft);
                            skipLeft -= skipped;
                        } catch (IOException e) {
                            log.error("Error while trying to seek to previous offset in file {}: ", fileConfig.getFilename(), e);
                            throw new ConnectException(-1, e);
                        }
                    }
                    log.debug("Skipped to offset {}", lastRecordedOffset);
                }
                streamOffset = (lastRecordedOffset != null) ? (Long) lastRecordedOffset : 0L;
            } else {
                log.info("positionInfo is null!");
                streamOffset = 0L;
            }
            reader = new BufferedReader(new InputStreamReader(stream, StandardCharsets.UTF_8));
            log.debug("Opened {} for reading", logFilename());
        } catch (NoSuchFileException e) {
            log.warn("Couldn't find file {} for FileStreamSourceTask, sleeping to wait for it to be created", logFilename());
            synchronized (this) {
                try {
                    this.wait(1000);
                } catch (InterruptedException e1) {
                    log.error("Interrupt error .", e1);
                }
            }
            return null;
        } catch (IOException e) {
            log.error("Error while trying to open file {}: ", fileConfig.getFilename(), e);
            throw new ConnectException(-1, e);
        }
    }
    try {
        final BufferedReader readerCopy;
        synchronized (this) {
            readerCopy = reader;
        }
        if (readerCopy == null) {
            return null;
        }
        Collection<SourceDataEntry> records = null;
        int nread = 0;
        while (readerCopy.ready()) {
            nread = readerCopy.read(buffer, offset, buffer.length - offset);
            log.trace("Read {} bytes from {}", nread, logFilename());
            if (nread > 0) {
                offset += nread;
                if (offset == buffer.length) {
                    char[] newbuf = new char[buffer.length * 2];
                    System.arraycopy(buffer, 0, newbuf, 0, buffer.length);
                    buffer = newbuf;
                }
                String line;
                do {
                    line = extractLine();
                    if (line != null) {
                        log.trace("Read a line from {}", logFilename());
                        if (records == null) {
                            records = new ArrayList<>();
                        }
                        Schema schema = new Schema();
                        schema.setDataSource(fileConfig.getFilename());
                        schema.setName(fileConfig.getFilename() + LINE);
                        final Field field = new Field(0, FileConstants.FILE_LINE_CONTENT, FieldType.STRING);
                        List<Field> fields = new ArrayList<>();
                        fields.add(field);
                        schema.setFields(fields);
                        DataEntryBuilder dataEntryBuilder = new DataEntryBuilder(schema)
                            .entryType(EntryType.CREATE)
                            .queue(fileConfig.getTopic())
                            .timestamp(System.currentTimeMillis())
                            .putFiled(FileConstants.FILE_LINE_CONTENT, line);
                        final SourceDataEntry sourceDataEntry = dataEntryBuilder.buildSourceDataEntry(
                            offsetKey(FileConstants.getPartition(fileConfig.getFilename())),
                            offsetValue(streamOffset));
                        records.add(sourceDataEntry);
                        if (records.size() >= batchSize) {
                            return records;
                        }
                    }
                } while (line != null);
            }
        }
        if (nread <= 0) {
            synchronized (this) {
                this.wait(1000);
            }
        }
        return records;
    } catch (IOException e) {
        // read errors are swallowed here: the method falls through to return null and retries on the next poll
    } catch (InterruptedException e) {
        log.error("Interrupted while waiting for new data.", e);
    }
    return null;
}
Also used : SourceDataEntry(io.openmessaging.connector.api.data.SourceDataEntry) InputStreamReader(java.io.InputStreamReader) Schema(io.openmessaging.connector.api.data.Schema) NoSuchFileException(java.nio.file.NoSuchFileException) ArrayList(java.util.ArrayList) IOException(java.io.IOException) DataEntryBuilder(io.openmessaging.connector.api.data.DataEntryBuilder) ByteBuffer(java.nio.ByteBuffer) Field(io.openmessaging.connector.api.data.Field) JSONObject(com.alibaba.fastjson.JSONObject) BufferedReader(java.io.BufferedReader) ConnectException(io.openmessaging.connector.api.exception.ConnectException)
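
The helpers offsetKey and offsetValue are not shown in this excerpt, but the read-back logic at the top of poll() constrains them: the partition key must match what is passed to positionStorageReader().getPosition(...), and the position value must be JSON carrying FileConstants.NEXT_POSITION. The following is a hedged sketch of plausible implementations inferred from that; the real helpers in FileSourceTask may differ.

// Hedged sketch of what offsetKey/offsetValue likely look like, inferred from how
// positionStorageReader().getPosition(...) and FileConstants.NEXT_POSITION are used above.
private ByteBuffer offsetKey(String partition) {
    // key the stored position by the same partition string the reader queries with
    return ByteBuffer.wrap(partition.getBytes(Charset.defaultCharset()));
}

private ByteBuffer offsetValue(Long streamOffset) {
    // persist the next read position as {"nextPosition": <offset>} so poll() can skip to it on restart
    JSONObject position = new JSONObject();
    position.put(FileConstants.NEXT_POSITION, streamOffset);
    return ByteBuffer.wrap(position.toJSONString().getBytes(Charset.defaultCharset()));
}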

Example 8 with Field

Use of io.openmessaging.connector.api.data.Field in project rocketmq-externals by apache.

The class WorkerSinkTask, method convertToSinkDataEntry:

private SinkDataEntry convertToSinkDataEntry(MessageExt message) {
    Map<String, String> properties = message.getProperties();
    String queueName;
    EntryType entryType;
    Schema schema;
    Long timestamp;
    Object[] datas = new Object[1];
    // With no converter (or the RocketMQConverter), metadata comes from the message properties and the raw
    // body is the single payload element; otherwise the body is a JSON-serialized SourceDataEntry whose
    // first payload element is Base64-encoded and decoded through the JsonConverter.
    if (null == recordConverter || recordConverter instanceof RocketMQConverter) {
        queueName = properties.get(RuntimeConfigDefine.CONNECT_TOPICNAME);
        String connectEntryType = properties.get(RuntimeConfigDefine.CONNECT_ENTRYTYPE);
        entryType = StringUtils.isNotEmpty(connectEntryType) ? EntryType.valueOf(connectEntryType) : null;
        String connectTimestamp = properties.get(RuntimeConfigDefine.CONNECT_TIMESTAMP);
        timestamp = StringUtils.isNotEmpty(connectTimestamp) ? Long.valueOf(connectTimestamp) : null;
        String connectSchema = properties.get(RuntimeConfigDefine.CONNECT_SCHEMA);
        schema = StringUtils.isNotEmpty(connectSchema) ? JSON.parseObject(connectSchema, Schema.class) : null;
        datas = new Object[1];
        datas[0] = message.getBody();
    } else {
        final byte[] messageBody = message.getBody();
        final SourceDataEntry sourceDataEntry = JSON.parseObject(new String(messageBody), SourceDataEntry.class);
        final Object[] payload = sourceDataEntry.getPayload();
        final byte[] decodeBytes = Base64.getDecoder().decode((String) payload[0]);
        Object recodeObject;
        if (recordConverter instanceof JsonConverter) {
            JsonConverter jsonConverter = (JsonConverter) recordConverter;
            jsonConverter.setClazz(Object[].class);
            recodeObject = recordConverter.byteToObject(decodeBytes);
            datas = (Object[]) recodeObject;
        }
        schema = sourceDataEntry.getSchema();
        entryType = sourceDataEntry.getEntryType();
        queueName = sourceDataEntry.getQueueName();
        timestamp = sourceDataEntry.getTimestamp();
    }
    DataEntryBuilder dataEntryBuilder = new DataEntryBuilder(schema);
    dataEntryBuilder.entryType(entryType);
    dataEntryBuilder.queue(queueName);
    dataEntryBuilder.timestamp(timestamp);
    List<Field> fields = schema.getFields();
    if (null != fields && !fields.isEmpty()) {
        for (Field field : fields) {
            dataEntryBuilder.putFiled(field.getName(), datas[field.getIndex()]);
        }
    }
    SinkDataEntry sinkDataEntry = dataEntryBuilder.buildSinkDataEntry(message.getQueueOffset());
    return sinkDataEntry;
}
Also used : SourceDataEntry(io.openmessaging.connector.api.data.SourceDataEntry) JsonConverter(org.apache.rocketmq.connect.runtime.converter.JsonConverter) Schema(io.openmessaging.connector.api.data.Schema) DataEntryBuilder(io.openmessaging.connector.api.data.DataEntryBuilder) RocketMQConverter(org.apache.rocketmq.connect.runtime.converter.RocketMQConverter) Field(io.openmessaging.connector.api.data.Field) SinkDataEntry(io.openmessaging.connector.api.data.SinkDataEntry) EntryType(io.openmessaging.connector.api.data.EntryType)
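
The loop over schema.getFields() is what ties the flat payload array back to named fields: each Field carries both an index into the payload and the name handed to putFiled. A hypothetical, self-contained illustration of that contract follows; the schema, field names, values, and the helper name buildDemoSinkEntry are made up for the sketch.

// Hypothetical illustration: the Field index selects the payload slot, the Field name labels it.
private SinkDataEntry buildDemoSinkEntry() {
    Schema schema = new Schema();
    schema.setName("demo");
    schema.setFields(new ArrayList<>());
    schema.getFields().add(new Field(0, "id", FieldType.INT64));
    schema.getFields().add(new Field(1, "name", FieldType.STRING));
    Object[] payload = new Object[] {42L, "hello"};
    DataEntryBuilder builder = new DataEntryBuilder(schema)
        .entryType(EntryType.CREATE)
        .queue("demo-queue")
        .timestamp(System.currentTimeMillis());
    for (Field field : schema.getFields()) {
        // same mapping as convertToSinkDataEntry: name -> payload[index]
        builder.putFiled(field.getName(), payload[field.getIndex()]);
    }
    return builder.buildSinkDataEntry(0L);   // queue offset 0 as a placeholder
}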

Example 9 with Field

Use of io.openmessaging.connector.api.data.Field in project rocketmq-externals by apache.

The class Updater, method queryBeforeUpdateRowId:

private Integer queryBeforeUpdateRowId(String dbName, String tableName, Map<Field, Object[]> fieldMap) {
    int count = 0, id = 0;
    ResultSet rs;
    PreparedStatement stmt;
    Boolean finishQuery = false;
    String query = "select id from " + dbName + "." + tableName + " where 1=1";
    for (Map.Entry<Field, Object[]> entry : fieldMap.entrySet()) {
        count++;
        String fieldName = entry.getKey().getName();
        FieldType fieldType = entry.getKey().getType();
        Object fieldValue = entry.getValue()[0];
        if ("id".equals(fieldName))
            continue;
        if (count <= fieldMap.size()) {
            query += " and ";
        }
        if (fieldValue == null) {
            query += fieldName + " is NULL";
        } else {
            query = typeParser(fieldType, fieldName, fieldValue, query);
        }
    }
    try {
        while (!connection.isClosed() && !finishQuery) {
            stmt = connection.prepareStatement(query);
            rs = stmt.executeQuery();
            if (rs != null) {
                while (rs.next()) {
                    id = rs.getInt("id");
                }
                finishQuery = true;
                rs.close();
            }
        }
    } catch (SQLException e) {
        log.error("query table error,{}", e);
    }
    return id;
}
Also used : Field(io.openmessaging.connector.api.data.Field) SQLException(java.sql.SQLException) ResultSet(java.sql.ResultSet) PreparedStatement(java.sql.PreparedStatement) Map(java.util.Map) FieldType(io.openmessaging.connector.api.data.FieldType)
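
queryBeforeUpdateRowId consults only element [0] of each value array and skips the id column, so callers are expected to pass two-element arrays per field (the same shape JdbcSinkTask parses in Example 10). A hypothetical caller-side sketch, assuming it sits alongside queryBeforeUpdateRowId in Updater; the table and column names are made up.

// Hypothetical fieldMap: each value array holds two elements, and element [0]
// (presumably the pre-update value) is what queryBeforeUpdateRowId matches on; "id" is skipped.
private Integer lookupDemoRowId() {
    Map<Field, Object[]> fieldMap = new HashMap<>();
    fieldMap.put(new Field(0, "id", FieldType.INT64), new Object[] {1L, 1L});
    fieldMap.put(new Field(1, "name", FieldType.STRING), new Object[] {"old", "new"});
    fieldMap.put(new Field(2, "age", FieldType.INT64), new Object[] {20L, 21L});
    return queryBeforeUpdateRowId("demo_db", "demo_table", fieldMap);
}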

Example 10 with Field

Use of io.openmessaging.connector.api.data.Field in project rocketmq-externals by apache.

The class JdbcSinkTask, method put:

@Override
public void put(Collection<SinkDataEntry> sinkDataEntries) {
    try {
        if (tableQueue.size() > 1) {
            updater = tableQueue.poll(1000, TimeUnit.MILLISECONDS);
        } else {
            updater = tableQueue.peek();
        }
        for (SinkDataEntry record : sinkDataEntries) {
            Map<Field, Object[]> fieldMap = new HashMap<>();
            Object[] payloads = record.getPayload();
            Schema schema = record.getSchema();
            EntryType entryType = record.getEntryType();
            String tableName = schema.getName();
            String dbName = schema.getDataSource();
            List<Field> fields = schema.getFields();
            Boolean parseError = false;
            if (!fields.isEmpty()) {
                for (Field field : fields) {
                    Object fieldValue = payloads[field.getIndex()];
                    Object[] value = JSONObject.parseArray((String) fieldValue).toArray();
                    if (value.length == 2) {
                        fieldMap.put(field, value);
                    } else {
                        log.error("parseArray error, fieldValue:{}", fieldValue);
                        parseError = true;
                    }
                }
            }
            if (!parseError) {
                Boolean isSuccess = updater.push(dbName, tableName, fieldMap, entryType);
                if (!isSuccess) {
                    log.error("push data error, dbName:{}, tableName:{}, entryType:{}, fieldMap:{}", dbName, tableName, fieldMap, entryType);
                }
            }
        }
    } catch (Exception e) {
        log.error("put sinkDataEntries error, {}", e);
    }
}
Also used : Field(io.openmessaging.connector.api.data.Field) SinkDataEntry(io.openmessaging.connector.api.data.SinkDataEntry) EntryType(io.openmessaging.connector.api.data.EntryType) HashMap(java.util.HashMap) Schema(io.openmessaging.connector.api.data.Schema) JSONObject(com.alibaba.fastjson.JSONObject)
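
put() requires every payload slot to be a JSON string that parses to exactly two elements; anything else is logged and the record is skipped. A minimal sketch of a producer-side entry that satisfies that check, assuming hypothetical database, table, and column names and the helper name buildDemoJdbcEntry.

// Hypothetical producer-side sketch of the payload shape put() accepts: each field's payload
// slot is a JSON array string with exactly two elements, so JSONObject.parseArray(...) yields length 2.
private SinkDataEntry buildDemoJdbcEntry() {
    Schema schema = new Schema();
    schema.setDataSource("demo_db");     // read back as dbName in put()
    schema.setName("demo_table");        // read back as tableName in put()
    schema.setFields(new ArrayList<>());
    schema.getFields().add(new Field(0, "name", FieldType.STRING));
    DataEntryBuilder builder = new DataEntryBuilder(schema)
        .entryType(EntryType.UPDATE)
        .queue("demo-queue")
        .timestamp(System.currentTimeMillis());
    builder.putFiled("name", JSONObject.toJSONString(new Object[] {"old", "new"}));
    return builder.buildSinkDataEntry(0L);
}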

Aggregations

Field (io.openmessaging.connector.api.data.Field) 16
Schema (io.openmessaging.connector.api.data.Schema) 9
JSONObject (com.alibaba.fastjson.JSONObject) 6
FieldType (io.openmessaging.connector.api.data.FieldType) 6
Map (java.util.Map) 6
DataEntryBuilder (io.openmessaging.connector.api.data.DataEntryBuilder) 5
SourceDataEntry (io.openmessaging.connector.api.data.SourceDataEntry) 5
SinkDataEntry (io.openmessaging.connector.api.data.SinkDataEntry) 4
ArrayList (java.util.ArrayList) 4
EntryType (io.openmessaging.connector.api.data.EntryType) 3
PreparedStatement (java.sql.PreparedStatement) 3
SQLException (java.sql.SQLException) 3
ResultSet (com.datastax.oss.driver.api.core.cql.ResultSet) 2
SimpleStatement (com.datastax.oss.driver.api.core.cql.SimpleStatement) 2
ResultSet (java.sql.ResultSet) 2
HashMap (java.util.HashMap) 2
Delete (com.datastax.oss.driver.api.querybuilder.delete.Delete) 1
DeleteSelection (com.datastax.oss.driver.api.querybuilder.delete.DeleteSelection) 1
InsertInto (com.datastax.oss.driver.api.querybuilder.insert.InsertInto) 1
RegularInsert (com.datastax.oss.driver.api.querybuilder.insert.RegularInsert) 1