Use of io.confluent.connect.s3.storage.S3Storage in project kafka-connect-storage-cloud by confluentinc.
The class DataWriterJsonTest, method setUp.
// @Before is omitted deliberately so that individual tests can add their own
// properties before invoking setUp() explicitly.
public void setUp() throws Exception {
  super.setUp();
  converter = new JsonConverter();
  // Plain JSON payloads, without embedded schemas.
  converter.configure(Collections.singletonMap("schemas.enable", "false"), false);
  s3 = newS3Client(connectorConfig);
  storage = new S3Storage(connectorConfig, url, S3_TEST_BUCKET_NAME, s3);
  partitioner = new DefaultPartitioner<>();
  partitioner.configure(parsedConfig);
  format = new JsonFormat(storage);
  s3.createBucket(S3_TEST_BUCKET_NAME);
  assertTrue(s3.doesBucketExist(S3_TEST_BUCKET_NAME));
}
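Because @Before is omitted, each test decides its configuration first and then calls setUp() by hand. A minimal sketch of that pattern, assuming a localProps map that the test base class merges into connectorConfig (the field name and the flush.size override are illustrative assumptions, not copied from the test class):

@Test
public void writesJsonWithSmallFlushSize() throws Exception {
  // Hypothetical per-test override, assumed to be merged into connectorConfig
  // by the base class when setUp() runs.
  localProps.put("flush.size", "3");
  setUp(); // called manually because @Before is intentionally absent
  // ... feed SinkRecords through the task and assert on objects in S3_TEST_BUCKET_NAME
}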
Use of io.confluent.connect.s3.storage.S3Storage in project kafka-connect-storage-cloud by confluentinc.
The class S3SinkTask, method start.
public void start(Map<String, String> props) {
  try {
    connectorConfig = new S3SinkConnectorConfig(props);
    url = connectorConfig.getString(StorageCommonConfig.STORE_URL_CONFIG);
    @SuppressWarnings("unchecked")
    Class<? extends S3Storage> storageClass =
        (Class<? extends S3Storage>) connectorConfig.getClass(StorageCommonConfig.STORAGE_CLASS_CONFIG);
    // Instantiate the configured storage class reflectively.
    storage = StorageFactory.createStorage(storageClass, S3SinkConnectorConfig.class, connectorConfig, url);
    if (!storage.bucketExists()) {
      throw new DataException("Non-existent S3 bucket: " + connectorConfig.getBucketName());
    }
    writerProvider = newFormat().getRecordWriterProvider();
    partitioner = newPartitioner(connectorConfig);
    open(context.assignment());
    log.info("Started S3 connector task with assigned partitions: {}", assignment);
  } catch (ClassNotFoundException | IllegalAccessException | InstantiationException
      | InvocationTargetException | NoSuchMethodException e) {
    throw new ConnectException("Reflection exception: ", e);
  } catch (AmazonClientException e) {
    throw new ConnectException(e);
  }
}
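For context, a property map like the following is what S3SinkConnectorConfig typically parses in start(). The key names follow the S3 sink connector's documented configuration, but treat exact keys and values as assumptions to verify against your connector version:

Map<String, String> props = new HashMap<>();
props.put("storage.class", "io.confluent.connect.s3.storage.S3Storage");
props.put("format.class", "io.confluent.connect.s3.format.json.JsonFormat");
props.put("s3.bucket.name", "my-test-bucket"); // must already exist, per the bucketExists() check above
props.put("s3.region", "us-west-2");
props.put("flush.size", "3");
// The Connect framework normally calls start(props) after initializing the
// task context; invoking it directly like this is purely illustrative.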
Use of io.confluent.connect.s3.storage.S3Storage in project kafka-connect-storage-cloud by confluentinc.
The class DataWriterByteArrayTest, method setUp.
// @Before is omitted deliberately so that individual tests can add their own
// properties before invoking setUp() explicitly.
public void setUp() throws Exception {
  super.setUp();
  converter = new ByteArrayConverter();
  s3 = newS3Client(connectorConfig);
  storage = new S3Storage(connectorConfig, url, S3_TEST_BUCKET_NAME, s3);
  partitioner = new DefaultPartitioner<>();
  partitioner.configure(parsedConfig);
  format = new ByteArrayFormat(storage);
  s3.createBucket(S3_TEST_BUCKET_NAME);
  assertTrue(s3.doesBucketExist(S3_TEST_BUCKET_NAME));
}
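Unlike the JSON test, no converter configuration is needed here: ByteArrayConverter is a pass-through. A small sketch of that behavior using the standard Kafka Connect API (the topic name and payload are illustrative):

ByteArrayConverter converter = new ByteArrayConverter();
byte[] payload = "raw-bytes".getBytes(StandardCharsets.UTF_8);
// fromConnectData returns the bytes unchanged when given Schema.BYTES_SCHEMA.
byte[] serialized = converter.fromConnectData("test-topic", Schema.BYTES_SCHEMA, payload);
assertTrue(Arrays.equals(payload, serialized));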
Use of io.confluent.connect.s3.storage.S3Storage in project kafka-connect-storage-cloud by confluentinc.
The class S3ProxyTest, method setUp.
@Before
public void setUp() throws Exception {
  super.setUp();
  storage = new S3Storage(connectorConfig, url, S3_TEST_BUCKET_NAME, null);
}
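The null final argument means no AmazonS3 client is wired in at construction time; proxy tests instead verify the client configuration that S3Storage derives from connectorConfig. A minimal sketch of that idea, assuming the connector's documented s3.proxy.url setting and a newClientConfiguration-style accessor on S3Storage (both are assumptions here, not confirmed from this page):

// Assuming connectorConfig was built with the proxy setting, e.g.
// properties.put("s3.proxy.url", "http://localhost:8080"), a test could
// check that the derived AWS ClientConfiguration picks it up:
ClientConfiguration clientConfig = storage.newClientConfiguration(connectorConfig); // assumed accessor
assertEquals("localhost", clientConfig.getProxyHost());
assertEquals(8080, clientConfig.getProxyPort());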
Use of io.confluent.connect.s3.storage.S3Storage in project kafka-connect-storage-cloud by confluentinc.
The class DataWriterAvroTest, method setUp.
// @Before is omitted deliberately so that individual tests can add their own
// properties before invoking setUp() explicitly.
public void setUp() throws Exception {
  super.setUp();
  // Spy on the real client so individual S3 calls can be stubbed or verified.
  s3 = PowerMockito.spy(newS3Client(connectorConfig));
  storage = new S3Storage(connectorConfig, url, S3_TEST_BUCKET_NAME, s3);
  partitioner = new DefaultPartitioner<>();
  partitioner.configure(parsedConfig);
  format = new AvroFormat(storage);
  s3.createBucket(S3_TEST_BUCKET_NAME);
  assertTrue(s3.doesBucketExist(S3_TEST_BUCKET_NAME));
}
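The PowerMockito.spy wrapper is what distinguishes this setup from the other tests: the real client still serves calls, but individual methods can be stubbed to inject faults. A minimal sketch, assuming the writer's upload path goes through putObject (the connector may use multipart uploads instead, so treat the stubbed method as an assumption):

// Simulate a transient S3 outage on the spied client.
PowerMockito.doThrow(new AmazonClientException("simulated outage"))
    .when(s3)
    .putObject(Mockito.any(PutObjectRequest.class));
// A subsequent write through the task would then fail or be retried,
// depending on how the connector is configured.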