Use of org.apache.storm.sql.runtime.IOutputSerializer in project storm by apache:
class MongoDataSourcesProvider, method constructTrident.
@Override
public ISqlTridentDataSource constructTrident(URI uri, String inputFormatClass, String outputFormatClass, Properties properties, List<FieldInfo> fields) {
    // Resolve an output serializer for the declared fields, then hand the
    // raw connection URI and properties straight to the Trident data source.
    // NOTE(review): inputFormatClass is unused here — presumably Mongo only
    // acts as a sink in this provider; confirm against the interface contract.
    final List<String> declaredFieldNames = FieldInfoUtils.getFieldNames(fields);
    final IOutputSerializer outputSerializer =
            SerdeUtils.getSerializer(outputFormatClass, properties, declaredFieldNames);
    return new MongoTridentDataSource(uri.toString(), properties, outputSerializer);
}
Use of org.apache.storm.sql.runtime.IOutputSerializer in project storm by apache:
class HdfsDataSourcesProvider, method constructStreams.
@Override
public ISqlStreamsDataSource constructStreams(URI uri, String inputFormatClass, String outputFormatClass, Properties properties, List<FieldInfo> fields) {
    // Build the output serializer from the table's field names and delegate
    // to the HDFS streams data source with the URI and connection properties.
    // NOTE(review): inputFormatClass is unused — HDFS appears to be sink-only
    // here; confirm against the provider interface.
    return new HdfsStreamsDataSource(
            uri.toString(),
            properties,
            SerdeUtils.getSerializer(outputFormatClass, properties, FieldInfoUtils.getFieldNames(fields)));
}
Use of org.apache.storm.sql.runtime.IOutputSerializer in project storm by apache:
class KafkaDataSourcesProvider, method constructStreams.
@Override
public ISqlStreamsDataSource constructStreams(URI uri, String inputFormatClass, String outputFormatClass, Properties properties, List<FieldInfo> fields) {
    // Collect the field names and locate the primary-key column; a Kafka
    // stream table must declare exactly one field as primary.
    List<String> fieldNames = new ArrayList<>(fields.size());
    int primaryIndex = -1;
    for (int i = 0; i < fields.size(); ++i) {
        FieldInfo f = fields.get(i);
        fieldNames.add(f.name());
        if (f.isPrimary()) {
            primaryIndex = i;
        }
    }
    Preconditions.checkState(primaryIndex != -1, "Kafka stream table must have a primary key");

    // Input-side deserialization scheme for the configured format.
    Scheme scheme = SerdeUtils.getScheme(inputFormatClass, properties, fieldNames);

    // bootstrap-servers is a mandatory URI query parameter; the topic name is
    // carried in the URI host component.
    Map<String, String> values = parseUriParams(uri.getQuery());
    String bootstrapServers = values.get(URI_PARAMS_BOOTSTRAP_SERVERS);
    Preconditions.checkNotNull(bootstrapServers, "bootstrap-servers must be specified");
    String topic = uri.getHost();

    // A random consumer group id keeps separate submissions of this table
    // from sharing offsets. String concatenation already invokes toString(),
    // so the explicit call on UUID.randomUUID() was redundant and is dropped.
    KafkaSpoutConfig<ByteBuffer, ByteBuffer> kafkaSpoutConfig =
            new KafkaSpoutConfig.Builder<ByteBuffer, ByteBuffer>(bootstrapServers, topic)
                    .setProp(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteBufferDeserializer.class)
                    .setProp(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteBufferDeserializer.class)
                    .setProp(ConsumerConfig.GROUP_ID_CONFIG, "storm-sql-kafka-" + UUID.randomUUID())
                    .setRecordTranslator(new RecordTranslatorSchemeAdapter(scheme))
                    .build();

    IOutputSerializer serializer = SerdeUtils.getSerializer(outputFormatClass, properties, fieldNames);
    return new KafkaStreamsDataSource(kafkaSpoutConfig, bootstrapServers, topic, properties, serializer);
}
Use of org.apache.storm.sql.runtime.IOutputSerializer in project storm by apache:
class RedisDataSourcesProvider, method constructStreams.
@Override
public ISqlStreamsDataSource constructStreams(URI uri, String inputFormatClass, String outputFormatClass, Properties props, List<FieldInfo> fields) {
    // Validate and decompose the Redis connection URI (host, port, db index,
    // password); fall back to the default port when the URI omits one.
    Preconditions.checkArgument(JedisURIHelper.isValid(uri), "URI is not valid for Redis: " + uri);
    String host = uri.getHost();
    int port = uri.getPort() != -1 ? uri.getPort() : DEFAULT_REDIS_PORT;
    int dbIdx = JedisURIHelper.getDBIndex(uri);
    String password = JedisURIHelper.getPassword(uri);

    int timeout = Integer.parseInt(props.getProperty(PROPERTY_REDIS_TIMEOUT, String.valueOf(DEFAULT_TIMEOUT)));
    // parseBoolean returns the primitive directly; Boolean.valueOf created a
    // boxed Boolean only to unbox it immediately.
    boolean clusterMode = Boolean.parseBoolean(props.getProperty(PROPERTY_USE_REDIS_CLUSTER, "false"));

    List<String> fieldNames = FieldInfoUtils.getFieldNames(fields);
    IOutputSerializer serializer = SerdeUtils.getSerializer(outputFormatClass, props, fieldNames);

    if (clusterMode) {
        // Cluster mode seeds discovery from the single host:port in the URI;
        // NOTE(review): db index and password are not applied here — confirm
        // whether JedisClusterConfig supports them.
        JedisClusterConfig config = new JedisClusterConfig.Builder().setNodes(Collections.singleton(new InetSocketAddress(host, port))).setTimeout(timeout).build();
        return new RedisClusterStreamsDataSource(config, props, fields, serializer);
    } else {
        JedisPoolConfig config = new JedisPoolConfig(host, port, timeout, password, dbIdx);
        return new RedisStreamsDataSource(config, props, fields, serializer);
    }
}
Use of org.apache.storm.sql.runtime.IOutputSerializer in project storm by apache:
class SocketDataSourcesProvider, method constructStreams.
@Override
public ISqlStreamsDataSource constructStreams(URI uri, String inputFormatClass, String outputFormatClass, Properties properties, List<FieldInfo> fields) {
    // The socket endpoint comes entirely from the URI: host plus an explicit
    // port. URI.getPort() returns -1 when no port was supplied.
    String host = uri.getHost();
    int port = uri.getPort();
    if (port == -1) {
        // IllegalArgumentException is the precise type for a malformed input
        // URI; it subclasses RuntimeException, so existing callers that catch
        // RuntimeException are unaffected.
        throw new IllegalArgumentException("Port information is not available. URI: " + uri);
    }

    // Input scheme and output serializer both derive from the declared fields.
    List<String> fieldNames = FieldInfoUtils.getFieldNames(fields);
    Scheme scheme = SerdeUtils.getScheme(inputFormatClass, properties, fieldNames);
    IOutputSerializer serializer = SerdeUtils.getSerializer(outputFormatClass, properties, fieldNames);
    return new SocketDataSourcesProvider.SocketStreamsDataSource(host, port, scheme, serializer);
}
Aggregations