use of org.stringtemplate.v4.ST (StringTemplate 4) in project antlr4 by antlr.
the class BaseSwiftTest method writeParserTestFile.
/**
 * Writes a Swift {@code main.swift} driver that runs the generated parser over the
 * input file given as the first command-line argument, walks the resulting parse
 * tree with a shape-checking listener, and prints any recognition errors.
 *
 * @param parserName          name of the generated Swift parser class
 * @param lexerName           name of the generated Swift lexer class
 * @param parserStartRuleName start rule to invoke on the parser
 * @param debug               when true, attach a DiagnosticErrorListener to the parser
 * @param profile             when true, install a ProfilingATNSimulator and print decision info
 */
protected void writeParserTestFile(String parserName, String lexerName, String parserStartRuleName, boolean debug, boolean profile) {
    ST outputFileST = new ST("import Antlr4\n" +
            "import Foundation\n" +
            "setbuf(__stdoutp, nil)\n" +
            "class TreeShapeListener: ParseTreeListener{\n" +
            " func visitTerminal(_ node: TerminalNode){ }\n" +
            " func visitErrorNode(_ node: ErrorNode){ }\n" +
            " func enterEveryRule(_ ctx: ParserRuleContext) throws { }\n" +
            " func exitEveryRule(_ ctx: ParserRuleContext) throws {\n" +
            // "..\\<" escapes '<' so StringTemplate does not treat it as an expression start.
            " for i in 0..\\<ctx.getChildCount() {\n" +
            " let parent = ctx.getChild(i)?.getParent()\n" +
            " if (!(parent is RuleNode) || (parent as! RuleNode ).getRuleContext() !== ctx) {\n" +
            " throw ANTLRError.illegalState(msg: \"Invalid parse tree shape detected.\")\n" +
            " }\n" +
            " }\n" +
            " }\n" +
            "}\n" +
            "\n" +
            "do {\n" +
            "let args = CommandLine.arguments\n" +
            "let input = ANTLRFileStream(args[1])\n" +
            "let lex = <lexerName>(input)\n" +
            "let tokens = CommonTokenStream(lex)\n" +
            "<createParser>\n" +
            "parser.setBuildParseTree(true)\n" +
            "<profile>\n" +
            "let tree = try parser.<parserStartRuleName>()\n" +
            "<if(profile)>print(profiler.getDecisionInfo().description)<endif>\n" +
            "try ParseTreeWalker.DEFAULT.walk(TreeShapeListener(), tree)\n" +
            "}catch ANTLRException.cannotInvokeStartRule {\n" +
            " print(\"error occur: cannotInvokeStartRule\")\n" +
            "}catch ANTLRException.recognition(let e ) {\n" +
            " print(\"error occur\\(e)\")\n" +
            "}catch {\n" +
            " print(\"error occur\")\n" +
            "}\n");
    // Build the parser-construction fragment once instead of constructing a
    // throwaway ST and replacing it when debug is requested.
    String createParserTemplate = " let parser = try <parserName>(tokens)\n";
    if (debug) {
        createParserTemplate += " parser.addErrorListener(DiagnosticErrorListener())\n";
    }
    ST createParserST = new ST(createParserTemplate);
    if (profile) {
        outputFileST.add("profile", "let profiler = ProfilingATNSimulator(parser)\n" + "parser.setInterpreter(profiler)");
    } else {
        // An empty list renders as nothing, so the <profile> line disappears.
        outputFileST.add("profile", new ArrayList<Object>());
    }
    outputFileST.add("createParser", createParserST);
    outputFileST.add("parserName", parserName);
    outputFileST.add("lexerName", lexerName);
    outputFileST.add("parserStartRuleName", parserStartRuleName);
    writeFile(tmpdir, "main.swift", outputFileST.render());
}
use of org.stringtemplate.v4.ST (StringTemplate 4) in project antlr4 by antlr.
the class BaseSwiftTest method writeLexerTestFile.
/**
 * Writes a Swift {@code main.swift} driver that tokenizes the input file named by
 * the first command-line argument with the generated lexer and prints every token,
 * optionally followed by a dump of the default-mode lexer DFA.
 *
 * @param lexerName name of the generated Swift lexer class
 * @param showDFA   when true, append a print of the DEFAULT_MODE DFA after the tokens
 */
protected void writeLexerTestFile(String lexerName, boolean showDFA) {
    // Optional trailing DFA dump; empty string contributes nothing to the template.
    String dfaDump = showDFA
            ? "print(lex.getInterpreter().getDFA(Lexer.DEFAULT_MODE).toLexerString(), terminator: \"\" )\n"
            : "";
    ST driverTemplate = new ST("import Antlr4\n" +
            "import Foundation\n" +
            "setbuf(__stdoutp, nil)\n" +
            "let args = CommandLine.arguments\n" +
            "let input = ANTLRFileStream(args[1])\n" +
            "let lex = <lexerName>(input)\n" +
            "let tokens = CommonTokenStream(lex)\n" +
            "do {\n" +
            " try tokens.fill()\n" +
            "} catch ANTLRException.cannotInvokeStartRule {\n" +
            " print(\"error occur: cannotInvokeStartRule\")\n" +
            "} catch ANTLRException.recognition(let e ) {\n" +
            " print(\"error occur\\(e)\")\n" +
            "} catch {\n" +
            " print(\"error occur\")\n" +
            "}\n" +
            "for t in tokens.getTokens() {\n" +
            " print(t)\n" +
            "}\n" +
            dfaDump);
    driverTemplate.add("lexerName", lexerName);
    writeFile(tmpdir, "main.swift", driverTemplate.render());
}
use of org.stringtemplate.v4.ST (StringTemplate 4) in project antlr4 by antlr.
the class LexerATNFactory method lexerCallCommand.
/**
 * Translates a lexer command with an argument (e.g. {@code pushMode(X)}) into an ATN handle.
 * First tries a dedicated {@link LexerAction}; failing that, falls back to rendering the
 * target's {@code Lexer<Command>Command} codegen template as a plain action.
 */
@Override
public Handle lexerCallCommand(GrammarAST ID, GrammarAST arg) {
    // A recognized command maps directly to a LexerAction — no template needed.
    LexerAction knownAction = createLexerAction(ID, arg);
    if (knownAction != null) {
        return action(ID, knownAction);
    }
    // fall back to standard action generation for the command
    String templateName = "Lexer" + CharSupport.capitalize(ID.getText()) + "Command";
    ST commandST = codegenTemplates.getInstanceOf(templateName);
    if (commandST == null) {
        // Unknown command for this target: report and contribute nothing to the ATN.
        g.tool.errMgr.grammarError(ErrorType.INVALID_LEXER_COMMAND, g.fileName, ID.token, ID.getText());
        return epsilon(ID);
    }
    // The template must declare an "arg" formal argument to accept the command's argument.
    boolean acceptsArg = commandST.impl.formalArguments != null
            && commandST.impl.formalArguments.containsKey("arg");
    if (!acceptsArg) {
        g.tool.errMgr.grammarError(ErrorType.UNWANTED_LEXER_COMMAND_ARGUMENT, g.fileName, ID.token, ID.getText());
        return epsilon(ID);
    }
    commandST.add("arg", arg.getText());
    commandST.add("grammar", arg.g);
    return action(commandST.render());
}
use of org.stringtemplate.v4.ST (StringTemplate 4) in project antlr4 by antlr.
the class CodeGenerator method getHeaderFileName.
/**
 * Returns the header file name for the generated recognizer, or {@code null} when
 * the target defines no {@code headerFileExtension} template (i.e. no header file).
 */
public String getHeaderFileName() {
    ST extensionST = getTemplates().getInstanceOf("headerFileExtension");
    if (extensionST == null) {
        return null;
    }
    // e.g. "MyParser" + ".h" for targets that emit headers.
    return g.getRecognizerName() + extensionST.render();
}
use of org.stringtemplate.v4.ST (StringTemplate 4) in project hive by apache.
the class DDLTask method showCreateTable.
/**
 * Writes a reconstructed {@code CREATE TABLE} / {@code CREATE VIEW} statement for
 * {@code tableName} to {@code outStream} by filling a StringTemplate built from the
 * table's metadata (columns, partitions, buckets, skew info, serde, location, properties).
 *
 * @return 0 on success, 1 if writing the statement failed with an IOException
 * @throws HiveException on metastore access failure
 */
private int showCreateTable(Hive db, DataOutputStream outStream, String tableName) throws HiveException {
// StringTemplate attribute names used as <placeholders> in the template assembled below.
final String EXTERNAL = "external";
final String TEMPORARY = "temporary";
final String LIST_COLUMNS = "columns";
final String TBL_COMMENT = "tbl_comment";
final String LIST_PARTITIONS = "partitions";
final String SORT_BUCKET = "sort_bucket";
final String SKEWED_INFO = "tbl_skewedinfo";
final String ROW_FORMAT = "row_format";
final String TBL_LOCATION = "tbl_location";
final String TBL_PROPERTIES = "tbl_properties";
boolean needsLocation = true;
StringBuilder createTab_str = new StringBuilder();
// NOTE(review): second arg false presumably means "do not throw if missing", so tbl
// could be null here — confirm doesTableNeedLocation/isView tolerate that.
Table tbl = db.getTable(tableName, false);
// Properties already emitted elsewhere in the DDL; excluded from TBLPROPERTIES.
List<String> duplicateProps = new ArrayList<String>();
try {
needsLocation = doesTableNeedLocation(tbl);
// Views short-circuit: emit CREATE VIEW with the expanded view text and return.
if (tbl.isView()) {
String createTab_stmt = "CREATE VIEW `" + tableName + "` AS " + tbl.getViewExpandedText();
outStream.write(createTab_stmt.getBytes(StandardCharsets.UTF_8));
return 0;
}
// Assemble the CREATE TABLE skeleton; each <placeholder> is filled in below.
createTab_str.append("CREATE <" + TEMPORARY + "><" + EXTERNAL + ">TABLE `");
createTab_str.append(tableName + "`(\n");
createTab_str.append("<" + LIST_COLUMNS + ">)\n");
createTab_str.append("<" + TBL_COMMENT + ">\n");
createTab_str.append("<" + LIST_PARTITIONS + ">\n");
createTab_str.append("<" + SORT_BUCKET + ">\n");
createTab_str.append("<" + SKEWED_INFO + ">\n");
createTab_str.append("<" + ROW_FORMAT + ">\n");
// LOCATION is omitted for tables whose storage handler manages location (e.g. HBase).
if (needsLocation) {
createTab_str.append("LOCATION\n");
createTab_str.append("<" + TBL_LOCATION + ">\n");
}
createTab_str.append("TBLPROPERTIES (\n");
createTab_str.append("<" + TBL_PROPERTIES + ">)\n");
ST createTab_stmt = new ST(createTab_str.toString());
// For cases where the table is temporary
String tbl_temp = "";
if (tbl.isTemporary()) {
duplicateProps.add("TEMPORARY");
tbl_temp = "TEMPORARY ";
}
// For cases where the table is external
String tbl_external = "";
if (tbl.getTableType() == TableType.EXTERNAL_TABLE) {
duplicateProps.add("EXTERNAL");
tbl_external = "EXTERNAL ";
}
// Columns: "`name` type [COMMENT '...']", comma-joined.
String tbl_columns = "";
List<FieldSchema> cols = tbl.getCols();
List<String> columns = new ArrayList<String>();
for (FieldSchema col : cols) {
String columnDesc = "  `" + col.getName() + "` " + col.getType();
if (col.getComment() != null) {
// Comments are escaped so quotes/backslashes survive re-parsing of the DDL.
columnDesc = columnDesc + " COMMENT '" + HiveStringUtils.escapeHiveCommand(col.getComment()) + "'";
}
columns.add(columnDesc);
}
tbl_columns = StringUtils.join(columns, ", \n");
// Table comment
String tbl_comment = "";
String tabComment = tbl.getProperty("comment");
if (tabComment != null) {
duplicateProps.add("comment");
tbl_comment = "COMMENT '" + HiveStringUtils.escapeHiveCommand(tabComment) + "'";
}
// Partitions: PARTITIONED BY clause, empty when the table is unpartitioned.
String tbl_partitions = "";
List<FieldSchema> partKeys = tbl.getPartitionKeys();
if (partKeys.size() > 0) {
tbl_partitions += "PARTITIONED BY ( \n";
List<String> partCols = new ArrayList<String>();
for (FieldSchema partKey : partKeys) {
String partColDesc = "  `" + partKey.getName() + "` " + partKey.getType();
if (partKey.getComment() != null) {
partColDesc = partColDesc + " COMMENT '" + HiveStringUtils.escapeHiveCommand(partKey.getComment()) + "'";
}
partCols.add(partColDesc);
}
tbl_partitions += StringUtils.join(partCols, ", \n");
tbl_partitions += ")";
}
// Clusters (Buckets): CLUSTERED BY [SORTED BY] INTO n BUCKETS.
String tbl_sort_bucket = "";
List<String> buckCols = tbl.getBucketCols();
if (buckCols.size() > 0) {
duplicateProps.add("SORTBUCKETCOLSPREFIX");
tbl_sort_bucket += "CLUSTERED BY ( \n  ";
tbl_sort_bucket += StringUtils.join(buckCols, ", \n  ");
tbl_sort_bucket += ") \n";
List<Order> sortCols = tbl.getSortCols();
if (sortCols.size() > 0) {
tbl_sort_bucket += "SORTED BY ( \n";
// Order
List<String> sortKeys = new ArrayList<String>();
for (Order sortCol : sortCols) {
String sortKeyDesc = "  " + sortCol.getCol() + " ";
if (sortCol.getOrder() == BaseSemanticAnalyzer.HIVE_COLUMN_ORDER_ASC) {
sortKeyDesc = sortKeyDesc + "ASC";
} else if (sortCol.getOrder() == BaseSemanticAnalyzer.HIVE_COLUMN_ORDER_DESC) {
sortKeyDesc = sortKeyDesc + "DESC";
}
sortKeys.add(sortKeyDesc);
}
tbl_sort_bucket += StringUtils.join(sortKeys, ", \n");
tbl_sort_bucket += ") \n";
}
tbl_sort_bucket += "INTO " + tbl.getNumBuckets() + " BUCKETS";
}
// Skewed Info: SKEWED BY ... ON (...) [STORED AS DIRECTORIES].
StringBuilder tbl_skewedinfo = new StringBuilder();
SkewedInfo skewedInfo = tbl.getSkewedInfo();
if (skewedInfo != null && !skewedInfo.getSkewedColNames().isEmpty()) {
tbl_skewedinfo.append("SKEWED BY (" + StringUtils.join(skewedInfo.getSkewedColNames(), ",") + ")\n");
tbl_skewedinfo.append("  ON (");
List<String> colValueList = new ArrayList<String>();
for (List<String> colValues : skewedInfo.getSkewedColValues()) {
colValueList.add("('" + StringUtils.join(colValues, "','") + "')");
}
tbl_skewedinfo.append(StringUtils.join(colValueList, ",") + ")");
if (tbl.isStoredAsSubDirectories()) {
tbl_skewedinfo.append("\n  STORED AS DIRECTORIES");
}
}
// Row format (SerDe)
StringBuilder tbl_row_format = new StringBuilder();
StorageDescriptor sd = tbl.getTTable().getSd();
SerDeInfo serdeInfo = sd.getSerdeInfo();
Map<String, String> serdeParams = serdeInfo.getParameters();
tbl_row_format.append("ROW FORMAT SERDE \n");
tbl_row_format.append("  '" + HiveStringUtils.escapeHiveCommand(serdeInfo.getSerializationLib()) + "' \n");
if (tbl.getStorageHandler() == null) {
// SERDE properties
// The default serialization format is implicit, so it is dropped from the output.
// NOTE(review): this remove() mutates the live metastore-backed parameter map — confirm intended.
if (Warehouse.DEFAULT_SERIALIZATION_FORMAT.equals(serdeParams.get(serdeConstants.SERIALIZATION_FORMAT))) {
serdeParams.remove(serdeConstants.SERIALIZATION_FORMAT);
}
if (!serdeParams.isEmpty()) {
appendSerdeParams(tbl_row_format, serdeParams).append(" \n");
}
tbl_row_format.append("STORED AS INPUTFORMAT \n  '" + HiveStringUtils.escapeHiveCommand(sd.getInputFormat()) + "' \n");
tbl_row_format.append("OUTPUTFORMAT \n  '" + HiveStringUtils.escapeHiveCommand(sd.getOutputFormat()) + "'");
} else {
// Storage-handler-backed tables emit STORED BY and keep the handler out of TBLPROPERTIES.
duplicateProps.add(META_TABLE_STORAGE);
tbl_row_format.append("STORED BY \n  '" + HiveStringUtils.escapeHiveCommand(tbl.getParameters().get(META_TABLE_STORAGE)) + "' \n");
// SerDe Properties
if (!serdeParams.isEmpty()) {
appendSerdeParams(tbl_row_format, serdeInfo.getParameters());
}
}
String tbl_location = "  '" + HiveStringUtils.escapeHiveCommand(sd.getLocation()) + "'";
// Table properties
// Stats bookkeeping keys are managed by Hive itself and excluded from the output.
duplicateProps.addAll(Arrays.asList(StatsSetupConst.TABLE_PARAMS_STATS_KEYS));
String tbl_properties = propertiesToString(tbl.getParameters(), duplicateProps);
// Fill every placeholder in the template assembled above.
createTab_stmt.add(TEMPORARY, tbl_temp);
createTab_stmt.add(EXTERNAL, tbl_external);
createTab_stmt.add(LIST_COLUMNS, tbl_columns);
createTab_stmt.add(TBL_COMMENT, tbl_comment);
createTab_stmt.add(LIST_PARTITIONS, tbl_partitions);
createTab_stmt.add(SORT_BUCKET, tbl_sort_bucket);
createTab_stmt.add(SKEWED_INFO, tbl_skewedinfo);
createTab_stmt.add(ROW_FORMAT, tbl_row_format);
// Table location should not be printed with hbase backed tables
if (needsLocation) {
createTab_stmt.add(TBL_LOCATION, tbl_location);
}
createTab_stmt.add(TBL_PROPERTIES, tbl_properties);
outStream.write(createTab_stmt.render().getBytes(StandardCharsets.UTF_8));
} catch (IOException e) {
LOG.info("show create table: ", e);
return 1;
}
return 0;
}
Aggregations