Use of io.openmessaging.connector.api.exception.ConnectException in project rocketmq-externals by apache.
In class ClusterManagementServiceImpl, method start:
@Override
public void start() {
    try {
        this.defaultMQPullConsumer.start();
    } catch (MQClientException e) {
        log.error("Start RocketMQ consumer for cluster management service error", e);
        throw new ConnectException(-1, "Start RocketMQ consumer for cluster management service error");
    }
    // Register a processor for NOTIFY_CONSUMER_IDS_CHANGED so the cluster
    // management service is notified when worker membership changes.
    WorkerChangeListener workerChangeListener = new WorkerChangeListener();
    this.defaultMQPullConsumer.getDefaultMQPullConsumerImpl()
        .getRebalanceImpl()
        .getmQClientFactory()
        .getMQClientAPIImpl()
        .getRemotingClient()
        .registerProcessor(RequestCode.NOTIFY_CONSUMER_IDS_CHANGED, workerChangeListener, null);
}
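Note that the catch block above logs the MQClientException but rethrows a ConnectException without attaching it as the cause, so the original stack trace is lost. Since the FileSinkTask example below shows that ConnectException also accepts a cause argument, a variant that preserves it could look like this (a sketch against the same fields as above):

    } catch (MQClientException e) {
        log.error("Start RocketMQ consumer for cluster management service error", e);
        // Chain the cause so the MQClientException stack trace survives the rethrow.
        throw new ConnectException(-1, "Start RocketMQ consumer for cluster management service error", e);
    }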
Use of io.openmessaging.connector.api.exception.ConnectException in project rocketmq-externals by apache.
In class FileSinkTask, method start:
@Override
public void start(KeyValue props) {
    fileConfig = new FileConfig();
    fileConfig.load(props);
    if (fileConfig.getFilename() == null || fileConfig.getFilename().isEmpty()) {
        outputStream = System.out;
    } else {
        try {
            outputStream = new PrintStream(
                Files.newOutputStream(Paths.get(fileConfig.getFilename()),
                    StandardOpenOption.CREATE, StandardOpenOption.APPEND),
                false, StandardCharsets.UTF_8.name());
        } catch (IOException e) {
            throw new ConnectException(-1,
                "Couldn't find or create file '" + fileConfig.getFilename() + "' for FileStreamSinkTask", e);
        }
    }
}
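For context, a minimal sketch of configuring and starting this task. DefaultKeyValue (from the OpenMessaging API) and the "filename" property name are assumptions here, inferred from the fileConfig.load(props) call above:

    // Sketch: drive FileSinkTask with an explicit output file.
    KeyValue props = new DefaultKeyValue();
    props.put("filename", "/tmp/connect-sink.txt");  // hypothetical path; omit to write to System.out

    FileSinkTask task = new FileSinkTask();
    task.start(props);  // throws ConnectException if the file cannot be created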
Use of io.openmessaging.connector.api.exception.ConnectException in project rocketmq-externals by apache.
In class FileSourceTask, method poll:
@Override
public Collection<SourceDataEntry> poll() {
    log.info("Start a poll, stream is null: {}", stream == null);
    if (stream == null) {
        try {
            stream = Files.newInputStream(Paths.get(fileConfig.getFilename()));
            ByteBuffer positionInfo = this.context.positionStorageReader()
                .getPosition(ByteBuffer.wrap(FileConstants.getPartition(fileConfig.getFilename())
                    .getBytes(Charset.defaultCharset())));
            if (positionInfo != null) {
                log.info("positionInfo is not null!");
                String positionJson = new String(positionInfo.array(), Charset.defaultCharset());
                JSONObject jsonObject = JSONObject.parseObject(positionJson);
                Object lastRecordedOffset = jsonObject.getLong(FileConstants.NEXT_POSITION);
                if (lastRecordedOffset != null && !(lastRecordedOffset instanceof Long)) {
                    throw new ConnectException(-1, "Offset position is the incorrect type");
                }
                if (lastRecordedOffset != null) {
                    log.debug("Found previous offset, trying to skip to file offset {}", lastRecordedOffset);
                    long skipLeft = (Long) lastRecordedOffset;
                    while (skipLeft > 0) {
                        try {
                            long skipped = stream.skip(skipLeft);
                            skipLeft -= skipped;
                        } catch (IOException e) {
                            log.error("Error while trying to seek to previous offset in file {}: ", fileConfig.getFilename(), e);
                            throw new ConnectException(-1, e);
                        }
                    }
                    log.debug("Skipped to offset {}", lastRecordedOffset);
                }
                streamOffset = (lastRecordedOffset != null) ? (Long) lastRecordedOffset : 0L;
            } else {
                log.info("positionInfo is null!");
                streamOffset = 0L;
            }
            reader = new BufferedReader(new InputStreamReader(stream, StandardCharsets.UTF_8));
            log.debug("Opened {} for reading", logFilename());
        } catch (NoSuchFileException e) {
            log.warn("Couldn't find file {} for FileStreamSourceTask, sleeping to wait for it to be created", logFilename());
            synchronized (this) {
                try {
                    this.wait(1000);
                } catch (InterruptedException e1) {
                    log.error("Interrupted while waiting for file to be created.", e1);
                }
            }
            return null;
        } catch (IOException e) {
            log.error("Error while trying to open file {}: ", fileConfig.getFilename(), e);
            throw new ConnectException(-1, e);
        }
    }
    try {
        final BufferedReader readerCopy;
        synchronized (this) {
            readerCopy = reader;
        }
        if (readerCopy == null) {
            return null;
        }
        Collection<SourceDataEntry> records = null;
        int nread = 0;
        while (readerCopy.ready()) {
            nread = readerCopy.read(buffer, offset, buffer.length - offset);
            log.trace("Read {} bytes from {}", nread, logFilename());
            if (nread > 0) {
                offset += nread;
                // Grow the buffer when it fills up so long lines are not truncated.
                if (offset == buffer.length) {
                    char[] newbuf = new char[buffer.length * 2];
                    System.arraycopy(buffer, 0, newbuf, 0, buffer.length);
                    buffer = newbuf;
                }
                String line;
                do {
                    line = extractLine();
                    if (line != null) {
                        log.trace("Read a line from {}", logFilename());
                        if (records == null) {
                            records = new ArrayList<>();
                        }
                        Schema schema = new Schema();
                        schema.setDataSource(fileConfig.getFilename());
                        schema.setName(fileConfig.getFilename() + LINE);
                        final Field field = new Field(0, FileConstants.FILE_LINE_CONTENT, FieldType.STRING);
                        List<Field> fields = new ArrayList<Field>();
                        fields.add(field);
                        schema.setFields(fields);
                        // Note: putFiled is the method name as spelled in the connector API.
                        DataEntryBuilder dataEntryBuilder = new DataEntryBuilder(schema)
                            .entryType(EntryType.CREATE)
                            .queue(fileConfig.getTopic())
                            .timestamp(System.currentTimeMillis())
                            .putFiled(FileConstants.FILE_LINE_CONTENT, line);
                        final SourceDataEntry sourceDataEntry = dataEntryBuilder.buildSourceDataEntry(
                            offsetKey(FileConstants.getPartition(fileConfig.getFilename())),
                            offsetValue(streamOffset));
                        records.add(sourceDataEntry);
                        if (records.size() >= batchSize) {
                            return records;
                        }
                    }
                } while (line != null);
            }
        }
        if (nread <= 0) {
            synchronized (this) {
                this.wait(1000);
            }
        }
        return records;
    } catch (IOException e) {
        // Swallow read errors; the next poll() retries from the current offset.
    } catch (InterruptedException e) {
        log.error("Interrupted while waiting for new data.", e);
    }
    return null;
}
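The offsetKey and offsetValue helpers are referenced but not shown in this excerpt. Given that buildSourceDataEntry takes the partition and position as ByteBuffers, and that the positionInfo branch above parses the stored position back out of a JSON object keyed by FileConstants.NEXT_POSITION, hypothetical reconstructions could look like:

    // Hypothetical helpers; the real implementations are not part of this excerpt.
    private ByteBuffer offsetKey(String partition) {
        // Mirrors the key poll() uses when reading the stored position back.
        return ByteBuffer.wrap(partition.getBytes(Charset.defaultCharset()));
    }

    private ByteBuffer offsetValue(Long nextPosition) {
        // Stored as JSON so poll() can recover it via jsonObject.getLong(NEXT_POSITION).
        JSONObject jsonObject = new JSONObject();
        jsonObject.put(FileConstants.NEXT_POSITION, nextPosition);
        return ByteBuffer.wrap(jsonObject.toJSONString().getBytes(Charset.defaultCharset()));
    }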