Use of org.apache.storm.sql.parser.SqlCreateTable in project storm by apache.
The class StormSqlImpl, method submit:
@Override
public void submit(String name, Iterable<String> statements, Map<String, ?> stormConf, SubmitOptions opts,
                   StormSubmitter.ProgressListener progressListener, String asUser) throws Exception {
    Map<String, ISqlTridentDataSource> dataSources = new HashMap<>();
    for (String sql : statements) {
        StormParser parser = new StormParser(sql);
        SqlNode node = parser.impl().parseSqlStmtEof();
        if (node instanceof SqlCreateTable) {
            handleCreateTableForTrident((SqlCreateTable) node, dataSources);
        } else if (node instanceof SqlCreateFunction) {
            handleCreateFunction((SqlCreateFunction) node);
        } else {
            QueryPlanner planner = new QueryPlanner(schema);
            AbstractTridentProcessor processor = planner.compile(dataSources, sql);
            TridentTopology topo = processor.build();
            Path jarPath = null;
            try {
                // The QueryPlanner in Trident mode backs the topology with freshly compiled
                // classes, so those classes must be packaged into the topology jar: the topology
                // is serialized, sent to Nimbus, then deserialized and executed in the workers.
                jarPath = Files.createTempFile("storm-sql", ".jar");
                System.setProperty("storm.jar", jarPath.toString());
                packageTopology(jarPath, processor);
                StormSubmitter.submitTopologyAs(name, stormConf, topo.build(), opts, progressListener, asUser);
            } finally {
                if (jarPath != null) {
                    Files.delete(jarPath);
                }
            }
        }
    }
}
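For context, a caller typically drives this method through the StormSql facade, passing a CREATE EXTERNAL TABLE DDL statement followed by the query itself. A minimal sketch, assuming the StormSql.construct() factory from storm-sql-core; the table definition and the kafka LOCATION URI are illustrative and require a matching data-source provider on the classpath:

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.storm.generated.SubmitOptions;
import org.apache.storm.generated.TopologyInitialStatus;
import org.apache.storm.sql.StormSql;

public class SubmitExample {
    public static void main(String[] args) throws Exception {
        // The DDL registers the external table; the final statement is the query to deploy.
        List<String> statements = Arrays.asList(
            "CREATE EXTERNAL TABLE ORDERS (ID INT PRIMARY KEY, UNIT_PRICE INT) "
                + "LOCATION 'kafka://localhost:2181/brokers?topic=orders'",
            "SELECT ID, UNIT_PRICE FROM ORDERS WHERE UNIT_PRICE > 20");

        Map<String, Object> stormConf = new HashMap<>();
        SubmitOptions opts = new SubmitOptions(TopologyInitialStatus.ACTIVE);

        // submit() parses each statement: DDL updates the schema, and the query is
        // compiled into a Trident topology, packaged, and shipped to Nimbus.
        StormSql.construct().submit("orders-filter", statements, stormConf, opts, null, null);
    }
}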
Use of org.apache.storm.sql.parser.SqlCreateTable in project storm by apache.
The class StormSqlImpl, method updateSchema:
private List<FieldInfo> updateSchema(SqlCreateTable n) {
    TableBuilderInfo builder = new TableBuilderInfo(typeFactory);
    List<FieldInfo> fields = new ArrayList<>();
    for (ColumnDefinition col : n.fieldList()) {
        builder.field(col.name(), col.type(), col.constraint());
        RelDataType dataType = col.type().deriveType(typeFactory);
        Class<?> javaType = (Class<?>) typeFactory.getJavaClass(dataType);
        // instanceof already evaluates to false for null, so no explicit null check is needed.
        boolean isPrimary = col.constraint() instanceof ColumnConstraint.PrimaryKey;
        fields.add(new FieldInfo(col.name(), javaType, isPrimary));
    }
    if (n.parallelism() != null) {
        builder.parallelismHint(n.parallelism());
    }
    Table table = builder.build();
    schema.add(n.tableName(), table);
    return fields;
}
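The primary-key detection above can be observed directly by parsing a DDL statement and walking its column list. A minimal sketch, assuming ColumnDefinition and ColumnConstraint live alongside SqlCreateTable in org.apache.storm.sql.parser (as the method above implies); the DDL string is illustrative:

import org.apache.calcite.sql.SqlNode;
import org.apache.storm.sql.parser.ColumnConstraint;
import org.apache.storm.sql.parser.ColumnDefinition;
import org.apache.storm.sql.parser.SqlCreateTable;
import org.apache.storm.sql.parser.StormParser;

public class DdlInspection {
    public static void main(String[] args) throws Exception {
        String ddl = "CREATE EXTERNAL TABLE ORDERS (ID INT PRIMARY KEY, UNIT_PRICE INT) "
            + "LOCATION 'kafka://localhost:2181/brokers?topic=orders'";
        SqlNode node = new StormParser(ddl).impl().parseSqlStmtEof();
        SqlCreateTable create = (SqlCreateTable) node;
        for (ColumnDefinition col : create.fieldList()) {
            // Mirrors updateSchema(): a column is primary iff its constraint is a PrimaryKey.
            boolean isPrimary = col.constraint() instanceof ColumnConstraint.PrimaryKey;
            System.out.println(col.name() + " -> primary=" + isPrimary);
        }
    }
}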
Use of org.apache.storm.sql.parser.SqlCreateTable in project storm by apache.
The class StormSqlImpl, method execute:
@Override
public void execute(Iterable<String> statements, ChannelHandler result) throws Exception {
    Map<String, DataSource> dataSources = new HashMap<>();
    for (String sql : statements) {
        StormParser parser = new StormParser(sql);
        SqlNode node = parser.impl().parseSqlStmtEof();
        if (node instanceof SqlCreateTable) {
            handleCreateTable((SqlCreateTable) node, dataSources);
        } else if (node instanceof SqlCreateFunction) {
            handleCreateFunction((SqlCreateFunction) node);
        } else {
            // Parse, validate, and convert the query into a relational tree,
            // then compile it into an in-process values processor.
            FrameworkConfig config = buildFrameWorkConfig();
            Planner planner = Frameworks.getPlanner(config);
            SqlNode parse = planner.parse(sql);
            SqlNode validate = planner.validate(parse);
            RelNode tree = planner.convert(validate);
            PlanCompiler compiler = new PlanCompiler(typeFactory);
            AbstractValuesProcessor proc = compiler.compile(tree);
            proc.initialize(dataSources, result);
        }
    }
}
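Because execute() runs the compiled query in-process, the caller receives rows through the ChannelHandler it supplies. A minimal sketch, assuming the AbstractChannelHandler convenience base class from storm-sql-runtime (which stubs every callback except dataReceived) and an illustrative mock LOCATION whose data-source provider is on the classpath:

import java.util.Arrays;
import java.util.List;
import org.apache.storm.sql.StormSql;
import org.apache.storm.sql.runtime.AbstractChannelHandler;
import org.apache.storm.sql.runtime.ChannelContext;
import org.apache.storm.tuple.Values;

public class ExecuteExample {
    public static void main(String[] args) throws Exception {
        List<String> statements = Arrays.asList(
            "CREATE EXTERNAL TABLE ORDERS (ID INT PRIMARY KEY) LOCATION 'mock:///orders'",
            "SELECT ID FROM ORDERS WHERE ID > 2");

        // Each row emitted by the compiled processor is pushed into this handler.
        StormSql.construct().execute(statements, new AbstractChannelHandler() {
            @Override
            public void dataReceived(ChannelContext ctx, Values data) {
                System.out.println("row> " + data);
            }
        });
    }
}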
Use of org.apache.storm.sql.parser.SqlCreateTable in project storm by apache.
The class StormSqlImpl, method explain:
@Override
public void explain(Iterable<String> statements) throws Exception {
    for (String sql : statements) {
        System.out.println("===========================================================");
        System.out.println("query>");
        System.out.println(sql);
        System.out.println("-----------------------------------------------------------");
        StormParser parser = new StormParser(sql);
        SqlNode node = parser.impl().parseSqlStmtEof();
        if (node instanceof SqlCreateTable) {
            sqlContext.interpretCreateTable((SqlCreateTable) node);
            System.out.println("No plan presented on DDL");
        } else if (node instanceof SqlCreateFunction) {
            sqlContext.interpretCreateFunction((SqlCreateFunction) node);
            System.out.println("No plan presented on DDL");
        } else {
            String plan = sqlContext.explain(sql);
            System.out.println("plan>");
            System.out.println(plan);
        }
        System.out.println("===========================================================");
    }
}
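Since explain() only prints plans and interprets DDL without deploying anything, invoking it is a one-liner. A minimal sketch, assuming the StormSql.construct() factory; the statements are illustrative:

import java.util.Arrays;
import org.apache.storm.sql.StormSql;

public class ExplainExample {
    public static void main(String[] args) throws Exception {
        // Prints each statement, then either its logical plan or a "No plan presented on DDL" notice.
        StormSql.construct().explain(Arrays.asList(
            "CREATE EXTERNAL TABLE ORDERS (ID INT PRIMARY KEY) LOCATION 'mock:///orders'",
            "SELECT ID FROM ORDERS WHERE ID > 2"));
    }
}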
Use of org.apache.storm.sql.parser.SqlCreateTable in project storm by apache.
The class StormSqlLocalClusterImpl, method runLocal:
public void runLocal(LocalCluster localCluster, Iterable<String> statements, Predicate<Void> waitCondition,
                     long waitTimeoutMs) throws Exception {
    final Config conf = new Config();
    conf.setMaxSpoutPending(20);
    for (String sql : statements) {
        StormParser parser = new StormParser(sql);
        SqlNode node = parser.impl().parseSqlStmtEof();
        if (node instanceof SqlCreateTable) {
            sqlContext.interpretCreateTable((SqlCreateTable) node);
        } else if (node instanceof SqlCreateFunction) {
            sqlContext.interpretCreateFunction((SqlCreateFunction) node);
        } else {
            AbstractStreamsProcessor processor = sqlContext.compileSql(sql);
            StormTopology topo = processor.build();
            // The compiled query may carry its own classloaders; register the last one
            // so the topology can be deserialized in this JVM.
            if (processor.getClassLoaders() != null && !processor.getClassLoaders().isEmpty()) {
                CompilingClassLoader lastClassloader = processor.getClassLoaders().get(processor.getClassLoaders().size() - 1);
                Utils.setClassLoaderForJavaDeSerialize(lastClassloader);
            }
            try (LocalCluster.LocalTopology stormTopo = localCluster.submitTopology("storm-sql", conf, topo)) {
                waitForCompletion(waitTimeoutMs, waitCondition);
            } finally {
                while (!localCluster.getTopologySummaries().isEmpty()) {
                    Thread.sleep(10);
                }
                Utils.resetClassLoaderForJavaDeSerialize();
            }
        }
    }
}
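A test would typically hand runLocal() a short-lived LocalCluster plus a wait condition. A minimal sketch, assuming Storm 2.x's AutoCloseable LocalCluster, a java.util.function.Predicate wait condition, and a no-arg StormSqlLocalClusterImpl constructor (all assumptions; the statements are illustrative):

import java.util.Arrays;
import java.util.function.Predicate;
import org.apache.storm.LocalCluster;

public class RunLocalExample {
    public static void main(String[] args) throws Exception {
        // A condition that never reports completion, so the timeout alone ends the run.
        Predicate<Void> waitCondition = ignored -> false;
        try (LocalCluster cluster = new LocalCluster()) {
            new StormSqlLocalClusterImpl().runLocal(cluster, Arrays.asList(
                "CREATE EXTERNAL TABLE ORDERS (ID INT PRIMARY KEY) LOCATION 'mock:///orders'",
                "SELECT ID FROM ORDERS WHERE ID > 2"), waitCondition, 2_000);
        }
    }
}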