Use of org.apache.storm.sql.parser.SqlCreateFunction in project storm by apache: class StormSqlImpl, method submit.
@Override
public void submit(String name, Iterable<String> statements, Map<String, ?> stormConf, SubmitOptions opts,
                   StormSubmitter.ProgressListener progressListener, String asUser) throws Exception {
    Map<String, ISqlTridentDataSource> dataSources = new HashMap<>();
    for (String sql : statements) {
        StormParser parser = new StormParser(sql);
        SqlNode node = parser.impl().parseSqlStmtEof();
        if (node instanceof SqlCreateTable) {
            handleCreateTableForTrident((SqlCreateTable) node, dataSources);
        } else if (node instanceof SqlCreateFunction) {
            handleCreateFunction((SqlCreateFunction) node);
        } else {
            QueryPlanner planner = new QueryPlanner(schema);
            AbstractTridentProcessor processor = planner.compile(dataSources, sql);
            TridentTopology topo = processor.build();
            Path jarPath = null;
            try {
                // QueryPlanner in Trident mode configures the topology with compiled classes,
                // so the new classes must be added to the topology jar. The topology is
                // serialized and sent to Nimbus, then deserialized and executed in the workers.
                jarPath = Files.createTempFile("storm-sql", ".jar");
                System.setProperty("storm.jar", jarPath.toString());
                packageTopology(jarPath, processor);
                StormSubmitter.submitTopologyAs(name, stormConf, topo.build(), opts, progressListener, asUser);
            } finally {
                if (jarPath != null) {
                    Files.delete(jarPath);
                }
            }
        }
    }
}
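For context, a caller typically hands submit a small script of DDL plus a single DML statement: the CREATE statements populate the data sources and schema, and the trailing INSERT is what actually gets compiled and submitted. A minimal driver sketch follows; the table definitions, Kafka URIs, UDF class, and topology name are hypothetical, not taken from the Storm sources.

// Hypothetical driver for submit(); assumes a StormSql instance from
// StormSql.construct() and a populated stormConf map.
List<String> statements = Arrays.asList(
    "CREATE EXTERNAL TABLE ORDERS (ID INT PRIMARY KEY, UNIT_PRICE INT, QUANTITY INT) "
        + "LOCATION 'kafka://localhost:2181/brokers?topic=orders'",                     // hypothetical source
    "CREATE EXTERNAL TABLE LARGE_ORDERS (ID INT PRIMARY KEY, TOTAL INT) "
        + "LOCATION 'kafka://localhost:2181/brokers?topic=large_orders'",               // hypothetical sink
    "CREATE FUNCTION MYPLUS AS 'org.example.MyPlus'",                                   // hypothetical UDF class
    "INSERT INTO LARGE_ORDERS SELECT ID, MYPLUS(UNIT_PRICE, QUANTITY) FROM ORDERS");
StormSql sql = StormSql.construct();
sql.submit("large-orders", statements, stormConf,
           new SubmitOptions(TopologyInitialStatus.ACTIVE), null, null);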
Use of org.apache.storm.sql.parser.SqlCreateFunction in project storm by apache: class StormSqlImpl, method execute.
@Override
public void execute(Iterable<String> statements, ChannelHandler result) throws Exception {
    Map<String, DataSource> dataSources = new HashMap<>();
    for (String sql : statements) {
        StormParser parser = new StormParser(sql);
        SqlNode node = parser.impl().parseSqlStmtEof();
        if (node instanceof SqlCreateTable) {
            handleCreateTable((SqlCreateTable) node, dataSources);
        } else if (node instanceof SqlCreateFunction) {
            handleCreateFunction((SqlCreateFunction) node);
        } else {
            // Parse, validate, and convert the query with Calcite, then compile the
            // resulting relational tree into an in-memory values processor.
            FrameworkConfig config = buildFrameWorkConfig();
            Planner planner = Frameworks.getPlanner(config);
            SqlNode parse = planner.parse(sql);
            SqlNode validate = planner.validate(parse);
            RelNode tree = planner.convert(validate);
            PlanCompiler compiler = new PlanCompiler(typeFactory);
            AbstractValuesProcessor proc = compiler.compile(tree);
            proc.initialize(dataSources, result);
        }
    }
}
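The non-DDL branch runs the query entirely in memory, pushing each output row to the supplied ChannelHandler. A sketch of collecting the results is below; it assumes storm-sql-runtime's AbstractChannelHandler adapter (which leaves only dataReceived to implement) and a hypothetical stormSql instance.

// Hedged sketch: collect every emitted row into a list.
final List<Values> rows = new ArrayList<>();
ChannelHandler collector = new AbstractChannelHandler() {
    @Override
    public void dataReceived(ChannelContext ctx, Values data) {
        rows.add(data);
    }
};
stormSql.execute(statements, collector);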
Use of org.apache.storm.sql.parser.SqlCreateFunction in project storm by apache: class StormSqlImpl, method explain.
@Override
public void explain(Iterable<String> statements) throws Exception {
    for (String sql : statements) {
        System.out.println("===========================================================");
        System.out.println("query>");
        System.out.println(sql);
        System.out.println("-----------------------------------------------------------");
        StormParser parser = new StormParser(sql);
        SqlNode node = parser.impl().parseSqlStmtEof();
        if (node instanceof SqlCreateTable) {
            sqlContext.interpretCreateTable((SqlCreateTable) node);
            System.out.println("No plan presented on DDL");
        } else if (node instanceof SqlCreateFunction) {
            sqlContext.interpretCreateFunction((SqlCreateFunction) node);
            System.out.println("No plan presented on DDL");
        } else {
            String plan = sqlContext.explain(sql);
            System.out.println("plan>");
            System.out.println(plan);
        }
        System.out.println("===========================================================");
    }
}
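Note that explain only prints; nothing is submitted. Driving it looks like the sketch below, where the table definition and the mock LOCATION URI are illustrative, not part of the Storm sources.

// Hypothetical driver: DDL statements are interpreted (and report
// "No plan presented on DDL"); queries print their Calcite plan.
StormSql sql = StormSql.construct();
sql.explain(Arrays.asList(
    "CREATE EXTERNAL TABLE FOO (ID INT PRIMARY KEY) LOCATION 'mock:///foo'",  // hypothetical URI
    "SELECT ID FROM FOO WHERE ID > 2"));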
Use of org.apache.storm.sql.parser.SqlCreateFunction in project storm by apache: class StormSqlLocalClusterImpl, method runLocal.
public void runLocal(LocalCluster localCluster, Iterable<String> statements, Predicate<Void> waitCondition,
                     long waitTimeoutMs) throws Exception {
    final Config conf = new Config();
    conf.setMaxSpoutPending(20);
    for (String sql : statements) {
        StormParser parser = new StormParser(sql);
        SqlNode node = parser.impl().parseSqlStmtEof();
        if (node instanceof SqlCreateTable) {
            sqlContext.interpretCreateTable((SqlCreateTable) node);
        } else if (node instanceof SqlCreateFunction) {
            sqlContext.interpretCreateFunction((SqlCreateFunction) node);
        } else {
            AbstractStreamsProcessor processor = sqlContext.compileSql(sql);
            StormTopology topo = processor.build();
            if (processor.getClassLoaders() != null && !processor.getClassLoaders().isEmpty()) {
                // The last classloader knows all the compiled classes; use it to
                // deserialize the topology inside the local cluster.
                CompilingClassLoader lastClassloader = processor.getClassLoaders().get(processor.getClassLoaders().size() - 1);
                Utils.setClassLoaderForJavaDeSerialize(lastClassloader);
            }
            try (LocalCluster.LocalTopology stormTopo = localCluster.submitTopology("storm-sql", conf, topo)) {
                waitForCompletion(waitTimeoutMs, waitCondition);
            } finally {
                // Wait until the topology is fully torn down before restoring the classloader.
                while (!localCluster.getTopologySummaries().isEmpty()) {
                    Thread.sleep(10);
                }
                Utils.resetClassLoaderForJavaDeSerialize();
            }
        }
    }
}
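The wait condition lets a test poll for its expected output before the topology is torn down. A hypothetical test driver is sketched below; MockValuesSink is an invented stand-in for whatever sink the test tables write to, and the row count and timeout are arbitrary.

// Hedged sketch, assuming a Storm version where LocalCluster is AutoCloseable.
try (LocalCluster cluster = new LocalCluster()) {
    // Poll until the (hypothetical) sink has seen four rows, or the timeout hits.
    Predicate<Void> done = ignored -> MockValuesSink.getCollectedValues().size() >= 4;
    stormSqlLocalCluster.runLocal(cluster, statements, done, 60_000L);
}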
Use of org.apache.storm.sql.parser.SqlCreateFunction in project storm by apache: class StormSqlImpl, method handleCreateFunction.
private void handleCreateFunction(SqlCreateFunction sqlCreateFunction) throws ClassNotFoundException {
    if (sqlCreateFunction.jarName() != null) {
        throw new UnsupportedOperationException("UDF 'USING JAR' not implemented");
    }
    Method method;
    Function function;
    // A scalar UDF exposes an "evaluate" method; an aggregate UDF exposes an "add" method.
    if ((method = findMethod(sqlCreateFunction.className(), "evaluate")) != null) {
        function = ScalarFunctionImpl.create(method);
    } else if (findMethod(sqlCreateFunction.className(), "add") != null) {
        function = AggregateFunctionImpl.create(Class.forName(sqlCreateFunction.className()));
    } else {
        throw new RuntimeException("Invalid scalar or aggregate function");
    }
    // Calcite resolves unquoted identifiers to upper case, so register the name upper-cased.
    schema.add(sqlCreateFunction.functionName().toUpperCase(), function);
    hasUdf = true;
}
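The lookup above fixes the UDF contract: a scalar function is any class with a public static evaluate method, while an aggregate function must match the shape Calcite's AggregateFunctionImpl reflects on (an init/add pair, optionally with result). Minimal sketches of both follow; the class names are hypothetical.

public class MyPlus {
    // Found by findMethod(className, "evaluate") and wrapped via ScalarFunctionImpl.create(method).
    public static int evaluate(int x, int y) {
        return x + y;
    }
}

public class MyConcat {
    // AggregateFunctionImpl.create(clazz) reflects on init/add (and optionally result).
    public static String init() {
        return "";
    }
    public static String add(String accumulator, String value) {
        return accumulator + value;
    }
    public static String result(String accumulator) {
        return accumulator;
    }
}

Either class is then registered with a statement such as CREATE FUNCTION MYPLUS AS 'org.example.MyPlus' and, per the last lines of handleCreateFunction, is resolved under its upper-cased name.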