Use of org.apache.asterix.lang.common.base.Statement in project asterixdb by apache.
The class QueryTranslator, method compileAndExecute:
@Override
public void compileAndExecute(IHyracksClientConnection hcc, IHyracksDataset hdc, ResultDelivery resultDelivery,
        ResultMetadata outMetadata, Stats stats, String clientContextId, IStatementExecutorContext ctx)
        throws Exception {
    int resultSetIdCounter = 0;
    FileSplit outputFile = null;
    IAWriterFactory writerFactory = PrinterBasedWriterFactory.INSTANCE;
    IResultSerializerFactoryProvider resultSerializerFactoryProvider = ResultSerializerFactoryProvider.INSTANCE;
    Map<String, String> config = new HashMap<>();
    /* Since the system runs a large number of threads, when HTTP requests don't return it becomes difficult to
     * find the thread that is running the request and determine where it has stopped.
     * Setting the thread name makes that easier.
     */
    String threadName = Thread.currentThread().getName();
    Thread.currentThread().setName(QueryTranslator.class.getSimpleName());
    try {
        for (Statement stmt : statements) {
            if (sessionConfig.is(SessionConfig.FORMAT_HTML)) {
                sessionOutput.out().println(ApiServlet.HTML_STATEMENT_SEPARATOR);
            }
            validateOperation(appCtx, activeDataverse, stmt);
            // Rewrite the statement's AST.
            rewriteStatement(stmt);
            MetadataProvider metadataProvider = new MetadataProvider(appCtx, activeDataverse, componentProvider);
            metadataProvider.setWriterFactory(writerFactory);
            metadataProvider.setResultSerializerFactoryProvider(resultSerializerFactoryProvider);
            metadataProvider.setOutputFile(outputFile);
            metadataProvider.setConfig(config);
            switch (stmt.getKind()) {
                case Statement.Kind.SET:
                    handleSetStatement(stmt, config);
                    break;
                case Statement.Kind.DATAVERSE_DECL:
                    activeDataverse = handleUseDataverseStatement(metadataProvider, stmt);
                    break;
                case Statement.Kind.CREATE_DATAVERSE:
                    handleCreateDataverseStatement(metadataProvider, stmt);
                    break;
                case Statement.Kind.DATASET_DECL:
                    handleCreateDatasetStatement(metadataProvider, stmt, hcc);
                    break;
                case Statement.Kind.CREATE_INDEX:
                    handleCreateIndexStatement(metadataProvider, stmt, hcc);
                    break;
                case Statement.Kind.TYPE_DECL:
                    handleCreateTypeStatement(metadataProvider, stmt);
                    break;
                case Statement.Kind.NODEGROUP_DECL:
                    handleCreateNodeGroupStatement(metadataProvider, stmt);
                    break;
                case Statement.Kind.DATAVERSE_DROP:
                    handleDataverseDropStatement(metadataProvider, stmt, hcc);
                    break;
                case Statement.Kind.DATASET_DROP:
                    handleDatasetDropStatement(metadataProvider, stmt, hcc);
                    break;
                case Statement.Kind.INDEX_DROP:
                    handleIndexDropStatement(metadataProvider, stmt, hcc);
                    break;
                case Statement.Kind.TYPE_DROP:
                    handleTypeDropStatement(metadataProvider, stmt);
                    break;
                case Statement.Kind.NODEGROUP_DROP:
                    handleNodegroupDropStatement(metadataProvider, stmt);
                    break;
                case Statement.Kind.CREATE_FUNCTION:
                    handleCreateFunctionStatement(metadataProvider, stmt);
                    break;
                case Statement.Kind.FUNCTION_DROP:
                    handleFunctionDropStatement(metadataProvider, stmt);
                    break;
                case Statement.Kind.LOAD:
                    handleLoadStatement(metadataProvider, stmt, hcc);
                    break;
                case Statement.Kind.INSERT:
                case Statement.Kind.UPSERT:
                    if (((InsertStatement) stmt).getReturnExpression() != null) {
                        metadataProvider.setResultSetId(new ResultSetId(resultSetIdCounter++));
                        metadataProvider.setResultAsyncMode(
                                resultDelivery == ResultDelivery.ASYNC || resultDelivery == ResultDelivery.DEFERRED);
                    }
                    handleInsertUpsertStatement(metadataProvider, stmt, hcc, hdc, resultDelivery, outMetadata, stats,
                            false, clientContextId, ctx);
                    break;
                case Statement.Kind.DELETE:
                    handleDeleteStatement(metadataProvider, stmt, hcc, false);
                    break;
                case Statement.Kind.CREATE_FEED:
                    handleCreateFeedStatement(metadataProvider, stmt);
                    break;
                case Statement.Kind.DROP_FEED:
                    handleDropFeedStatement(metadataProvider, stmt, hcc);
                    break;
                case Statement.Kind.DROP_FEED_POLICY:
                    handleDropFeedPolicyStatement(metadataProvider, stmt);
                    break;
                case Statement.Kind.CONNECT_FEED:
                    handleConnectFeedStatement(metadataProvider, stmt);
                    break;
                case Statement.Kind.DISCONNECT_FEED:
                    handleDisconnectFeedStatement(metadataProvider, stmt);
                    break;
                case Statement.Kind.START_FEED:
                    handleStartFeedStatement(metadataProvider, stmt, hcc);
                    break;
                case Statement.Kind.STOP_FEED:
                    handleStopFeedStatement(metadataProvider, stmt);
                    break;
                case Statement.Kind.CREATE_FEED_POLICY:
                    handleCreateFeedPolicyStatement(metadataProvider, stmt);
                    break;
                case Statement.Kind.QUERY:
                    metadataProvider.setResultSetId(new ResultSetId(resultSetIdCounter++));
                    metadataProvider.setResultAsyncMode(
                            resultDelivery == ResultDelivery.ASYNC || resultDelivery == ResultDelivery.DEFERRED);
                    handleQuery(metadataProvider, (Query) stmt, hcc, hdc, resultDelivery, outMetadata, stats,
                            clientContextId, ctx);
                    break;
                case Statement.Kind.COMPACT:
                    handleCompactStatement(metadataProvider, stmt, hcc);
                    break;
                case Statement.Kind.EXTERNAL_DATASET_REFRESH:
                    handleExternalDatasetRefreshStatement(metadataProvider, stmt, hcc);
                    break;
                case Statement.Kind.WRITE:
                    Pair<IAWriterFactory, FileSplit> result = handleWriteStatement(stmt);
                    writerFactory = (result.first != null) ? result.first : writerFactory;
                    outputFile = result.second;
                    break;
                case Statement.Kind.RUN:
                    handleRunStatement(metadataProvider, stmt, hcc);
                    break;
                case Statement.Kind.FUNCTION_DECL:
                    // No op.
                    break;
                case Statement.Kind.EXTENSION:
                    ((IExtensionStatement) stmt).handle(this, metadataProvider, hcc, hdc, resultDelivery, stats,
                            resultSetIdCounter);
                    break;
                default:
                    throw new CompilationException("Unknown statement kind: " + stmt.getKind());
            }
        }
    } finally {
        Thread.currentThread().setName(threadName);
    }
}
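The try/finally around the statement loop exists only to tag the worker thread for the duration of the request, as the comment at the top of the method explains. Below is a minimal, self-contained sketch of that pattern using only standard-library calls; the thread name "QueryTranslator" is just a label here, and the sleep stands in for the real work.

public final class ThreadNameTagging {
    public static void main(String[] args) throws InterruptedException {
        // Remember the pooled thread's original name so it can be restored.
        String threadName = Thread.currentThread().getName();
        Thread.currentThread().setName("QueryTranslator");
        try {
            Thread.sleep(100); // stand-in for compiling and executing statements
        } finally {
            // Pooled threads outlive the request, so restoring the name matters.
            Thread.currentThread().setName(threadName);
        }
    }
}

With the name set, a thread dump taken while a request hangs immediately shows which thread is serving it and where it is blocked.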
Use of org.apache.asterix.lang.common.base.Statement in project asterixdb by apache.
The class ExecuteStatementRequestMessage, method handle:
@Override
public void handle(ICcApplicationContext ccAppCtx) throws HyracksDataException, InterruptedException {
    ICCServiceContext ccSrvContext = ccAppCtx.getServiceContext();
    ClusterControllerService ccSrv = (ClusterControllerService) ccSrvContext.getControllerService();
    CCApplication ccApp = (CCApplication) ccSrv.getApplication();
    CCMessageBroker messageBroker = (CCMessageBroker) ccSrvContext.getMessageBroker();
    CCExtensionManager ccExtMgr = (CCExtensionManager) ccAppCtx.getExtensionManager();
    ILangCompilationProvider compilationProvider = ccExtMgr.getCompilationProvider(lang);
    IStorageComponentProvider storageComponentProvider = ccAppCtx.getStorageComponentProvider();
    IStatementExecutorFactory statementExecutorFactory = ccApp.getStatementExecutorFactory();
    IStatementExecutorContext statementExecutorContext = ccApp.getStatementExecutorContext();
    ccSrv.getExecutor().submit(() -> {
        ExecuteStatementResponseMessage responseMsg = new ExecuteStatementResponseMessage(requestMessageId);
        try {
            final IClusterManagementWork.ClusterState clusterState = ClusterStateManager.INSTANCE.getState();
            if (clusterState != IClusterManagementWork.ClusterState.ACTIVE) {
                throw new IllegalStateException("Cannot execute request, cluster is " + clusterState);
            }
            IParser parser = compilationProvider.getParserFactory().createParser(statementsText);
            List<Statement> statements = parser.parse();
            StringWriter outWriter = new StringWriter(256);
            PrintWriter outPrinter = new PrintWriter(outWriter);
            SessionOutput.ResultDecorator resultPrefix = ResultUtil.createPreResultDecorator();
            SessionOutput.ResultDecorator resultPostfix = ResultUtil.createPostResultDecorator();
            SessionOutput.ResultAppender appendHandle = ResultUtil.createResultHandleAppender(handleUrl);
            SessionOutput.ResultAppender appendStatus = ResultUtil.createResultStatusAppender();
            SessionOutput sessionOutput = new SessionOutput(sessionConfig, outPrinter, resultPrefix, resultPostfix,
                    appendHandle, appendStatus);
            IStatementExecutor.ResultMetadata outMetadata = new IStatementExecutor.ResultMetadata();
            MetadataManager.INSTANCE.init();
            IStatementExecutor translator = statementExecutorFactory.create(ccAppCtx, statements, sessionOutput,
                    compilationProvider, storageComponentProvider);
            translator.compileAndExecute(ccAppCtx.getHcc(), null, delivery, outMetadata,
                    new IStatementExecutor.Stats(), clientContextID, statementExecutorContext);
            outPrinter.close();
            responseMsg.setResult(outWriter.toString());
            responseMsg.setMetadata(outMetadata);
        } catch (AlgebricksException | HyracksException | TokenMgrError
                | org.apache.asterix.aqlplus.parser.TokenMgrError pe) {
            // We trust that "our" exceptions are serializable and have a comprehensible error message.
            GlobalConfig.ASTERIX_LOGGER.log(Level.WARNING, pe.getMessage(), pe);
            responseMsg.setError(pe);
        } catch (Exception e) {
            GlobalConfig.ASTERIX_LOGGER.log(Level.SEVERE, "Unexpected exception", e);
            responseMsg.setError(new Exception(e.toString()));
        }
        try {
            messageBroker.sendApplicationMessageToNC(responseMsg, requestNodeId);
        } catch (Exception e) {
            LOGGER.log(Level.WARNING, e.toString(), e);
        }
    });
}
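The shape worth noting: the work runs asynchronously on the cluster controller's executor, failures are folded into the response message rather than rethrown, and the reply is sent unconditionally, so the requesting node controller never waits on a request whose executor task died. A minimal sketch of that submit-then-reply pattern with only standard-library types follows; the StringBuilder and the println are hypothetical stand-ins for the response message and the broker send.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public final class SubmitThenReply {
    public static void main(String[] args) {
        ExecutorService executor = Executors.newSingleThreadExecutor();
        executor.submit(() -> {
            StringBuilder responseMsg = new StringBuilder(); // stand-in for ExecuteStatementResponseMessage
            try {
                responseMsg.append("result: ").append(21 * 2); // stand-in for compileAndExecute
            } catch (Exception e) {
                responseMsg.append("error: ").append(e); // the error travels inside the response
            }
            System.out.println(responseMsg); // stand-in for sendApplicationMessageToNC
        });
        executor.shutdown();
    }
}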
Use of org.apache.asterix.lang.common.base.Statement in project asterixdb by apache.
The class AQLToSQLPPConverter, method convert:
private static void convert(File src, File dest) throws Exception {
    if (src.isFile()) {
        BufferedReader parserReader = new BufferedReader(new InputStreamReader(new FileInputStream(src)));
        BufferedReader reader = new BufferedReader(new InputStreamReader(new FileInputStream(src)));
        BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(dest)));
        try {
            String line = null;
            while ((line = reader.readLine()) != null) {
                if (line.startsWith("/*") || line.startsWith(" *") || line.startsWith("*")
                        || line.startsWith("\t") || line.startsWith(" \t")) {
                    writer.write(line + "\n");
                } else {
                    break;
                }
            }
            writer.write("\n");
            reader.close();
            IParser parser = aqlParserFactory.createParser(parserReader);
            List<Statement> statements = parser.parse();
            parserReader.close();
            String sqlString = AQLFormatPrintUtil.toSQLPPString(statements);
            writer.write(sqlString);
        } catch (Exception e) {
            System.out.println("AQL parser fails at: " + src.getAbsolutePath());
            //e.printStackTrace();
        } finally {
            parserReader.close();
            reader.close();
            writer.close();
        }
        BufferedReader sqlReader = new BufferedReader(new InputStreamReader(new FileInputStream(dest)));
        try {
            IParser sqlParser = sqlppParserFactory.createParser(sqlReader);
            sqlParser.parse();
        } catch (Exception e) {
            System.out.println("SQL++ parser cannot parse: ");
            System.out.println(dest.getAbsolutePath());
            e.printStackTrace();
        } finally {
            sqlReader.close();
        }
        return;
    }
    for (File child : src.listFiles()) {
        String lastName = child.getName();
        lastName = lastName.replaceAll("\\.aql", "\\.sqlpp");
        File targetChild = new File(dest, lastName);
        if (child.isDirectory()) {
            FileUtils.forceMkdir(targetChild);
        } else {
            targetChild.createNewFile();
        }
        convert(child, targetChild);
    }
}
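Note that reader and parserReader are closed twice, once in the try body and again in the finally block; close() is idempotent on these streams, so this is harmless but noisy. A sketch of the same header-copying lifecycle with try-with-resources, which closes each stream exactly once even when parsing fails part-way through, is below; the HeaderCopy class and copyHeader name are hypothetical.

import java.io.*;

public final class HeaderCopy {
    // Copy the leading license/comment block of src into dest, stopping at the
    // first non-comment line, as the convert() method above does.
    static void copyHeader(File src, File dest) throws IOException {
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(new FileInputStream(src)));
                BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(dest)))) {
            String line;
            while ((line = reader.readLine()) != null) {
                if (!(line.startsWith("/*") || line.startsWith(" *") || line.startsWith("*"))) {
                    break;
                }
                writer.write(line + "\n");
            }
        } // both streams are closed here, in reverse order, exactly once
    }
}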
Use of org.apache.asterix.lang.common.base.Statement in project asterixdb by apache.
The class ParserTestExecutor, method testSQLPPParser:
// Tests the SQL++ parser.
public void testSQLPPParser(File queryFile, File actualResultFile, File expectedFile) throws Exception {
    actualResultFile.getParentFile().mkdirs();
    PrintWriter writer = new PrintWriter(new FileOutputStream(actualResultFile));
    IParser parser = sqlppParserFactory.createParser(readTestFile(queryFile));
    GlobalConfig.ASTERIX_LOGGER.info(queryFile.toString());
    try {
        List<Statement> statements = parser.parse();
        List<FunctionDecl> functions = getDeclaredFunctions(statements);
        String dvName = getDefaultDataverse(statements);
        MetadataProvider metadataProvider = mock(MetadataProvider.class);
        @SuppressWarnings("unchecked")
        Map<String, String> config = mock(Map.class);
        when(metadataProvider.getDefaultDataverseName()).thenReturn(dvName);
        when(metadataProvider.getConfig()).thenReturn(config);
        when(config.get(FunctionUtil.IMPORT_PRIVATE_FUNCTIONS)).thenReturn("true");
        when(metadataProvider.findDataset(anyString(), anyString())).thenReturn(mock(Dataset.class));
        for (Statement st : statements) {
            if (st.getKind() == Statement.Kind.QUERY) {
                Query query = (Query) st;
                IQueryRewriter rewriter = sqlppRewriterFactory.createQueryRewriter();
                rewrite(rewriter, functions, query, metadataProvider,
                        new LangRewritingContext(query.getVarCounter()));
                // Tests deep copy and deep equality.
                Query copiedQuery = (Query) SqlppRewriteUtil.deepCopy(query);
                Assert.assertEquals(query.hashCode(), copiedQuery.hashCode());
                Assert.assertEquals(query, copiedQuery);
            }
            SqlppAstPrintUtil.print(st, writer);
        }
        writer.close();
        // Compare the actual result with the expected result.
        runScriptAndCompareWithResult(queryFile, new PrintWriter(System.err), expectedFile, actualResultFile,
                ComparisonEnum.TEXT);
    } catch (Exception e) {
        GlobalConfig.ASTERIX_LOGGER.warning("Failed while testing file " + queryFile);
        throw e;
    } finally {
        writer.close();
    }
}
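The two assertions encode a general contract: a deep copy must be equals() to its original and share its hashCode(), while still being a distinct object graph. A self-contained sketch of that contract on a toy AST node follows; the Node class is a hypothetical simplification, not an AsterixDB type.

import java.util.Objects;

public final class DeepCopyContract {
    static final class Node {
        final String label;
        Node(String label) { this.label = label; }
        Node deepCopy() { return new Node(label); } // fresh object, same content
        @Override public boolean equals(Object o) {
            return o instanceof Node && Objects.equals(label, ((Node) o).label);
        }
        @Override public int hashCode() { return Objects.hash(label); }
    }

    public static void main(String[] args) {
        Node original = new Node("select-expr");
        Node copy = original.deepCopy();
        System.out.println(copy != original);                       // true: distinct objects
        System.out.println(copy.equals(original));                  // true: deep equality
        System.out.println(copy.hashCode() == original.hashCode()); // true: consistent hashing
    }
}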
Use of org.apache.asterix.lang.common.base.Statement in project asterixdb by apache.
The class ADGenDmlTranslator, method translate:
public void translate() throws AsterixException, MetadataException, AlgebricksException {
    String defaultDataverse = getDefaultDataverse();
    types = new HashMap<TypeSignature, IAType>();
    typeDataGenMap = new HashMap<TypeSignature, TypeDataGen>();
    for (Statement stmt : aqlStatements) {
        if (stmt.getKind() == Statement.Kind.TYPE_DECL) {
            TypeDecl td = (TypeDecl) stmt;
            String typeDataverse =
                    td.getDataverseName() == null ? defaultDataverse : td.getDataverseName().getValue();
            Map<TypeSignature, IAType> typeInStmt = TypeTranslator.computeTypes(mdTxnCtx, td.getTypeDef(),
                    td.getIdent().getValue(), typeDataverse, types);
            types.putAll(typeInStmt);
            TypeSignature signature = new TypeSignature(typeDataverse, td.getIdent().getValue());
            TypeDataGen tdg = td.getDatagenAnnotation();
            if (tdg != null) {
                typeDataGenMap.put(signature, tdg);
            }
        }
    }
}
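The loop is a filter-and-collect pass: scan the statement list, keep only TYPE_DECL statements, resolve each declaration's dataverse (falling back to the default), and index the results by a composite (dataverse, name) key. A minimal sketch of that shape with plain strings is below; the three-element statement encoding is hypothetical and the map value stands in for TypeTranslator.computeTypes.

import java.util.HashMap;
import java.util.List;
import java.util.Map;

public final class CollectTypeDecls {
    public static void main(String[] args) {
        // Each row: { kind, dataverse (nullable), type name }.
        List<String[]> statements = List.of(
                new String[] { "TYPE_DECL", "tpch", "LineItem" },
                new String[] { "QUERY", null, null },
                new String[] { "TYPE_DECL", null, "Person" });
        String defaultDataverse = "Default";
        Map<String, String> types = new HashMap<>();
        for (String[] stmt : statements) {
            if ("TYPE_DECL".equals(stmt[0])) {
                String dataverse = stmt[1] == null ? defaultDataverse : stmt[1];
                types.put(dataverse + "." + stmt[2], "computed-type"); // stand-in for computeTypes
            }
        }
        System.out.println(types); // prints the two collected TYPE_DECL keys
    }
}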