Use of com.qlangtech.tis.sql.parser.tuple.creator.EntityName in the project tis by qlangtech.
Class FlatTableRelation, method buildQueryHeaderByTailerInfoMethodName.
private String buildQueryHeaderByTailerInfoMethodName() {
    // Derive the generated query-method name, shaped as
    // "query<HeaderTable>By<TailerTable>".
    final EntityName tail = this.getTailerEntity();
    final EntityName head = this.getHeaderEntity();
    return String.format("query%sBy%s", head.javaPropTableName(), tail.javaPropTableName());
}
Use of com.qlangtech.tis.sql.parser.tuple.creator.EntityName in the project tis by qlangtech.
Class TabCriteriaEntityRecognizeVisitor, method visit.
@Override
public void visit(TableTupleCreator tableTuple) {
// Resolve the real entity behind this table criteria: either parse the
// embedded sub-query into a joiner task node, or match the plain table
// name against the dump-node context.
TabCriteria tableCriteria = tabCriteria;
// Handle the JOIN part of the criteria (non-primary tables only).
if (!tableCriteria.isPrimary()) {
JoinOn joinOn = tableCriteria.getJoinOn();
ComparisonExpression compare = null;
LogicalBinaryExpression logic = null;
// NOTE(review): 'compare' and 'logic' are assigned but never read afterwards,
// so this branch currently has no observable effect (assuming the getters are
// side-effect free) — confirm whether it is vestigial.
if (joinOn.getExpression() instanceof ComparisonExpression) {
// e.g. (tp.totalpay_id = o.totalpay_id)
compare = (ComparisonExpression) joinOn.getExpression();
} else if (joinOn.getExpression() instanceof LogicalBinaryExpression) {
// e.g. ((tp.card_id = cc.id) AND (tp.entity_id = cc.entity_id))
logic = (LogicalBinaryExpression) joinOn.getExpression();
}
}
if (this.subQuery != null) {
// The criteria embeds a sub-query (as in totalpay/order_customers.txt):
// build a JOINER_SQL task node whose content is the formatted sub-query body.
SqlTaskNode subqueryTaskNode = new SqlTaskNode(EntityName.parse(tableCriteria.getName()), NodeType.JOINER_SQL, this.dumpNodsContext);
// Map<ColName, ValueOperator> columnTracer = Maps.newHashMap();
// Rewriter rewriter = Rewriter.create(columnTracer);
subqueryTaskNode.setContent(SqlFormatter.formatSql(this.subQuery.getQuery().getQueryBody(), Optional.empty()));
// Re-parse the formatted content to discover which tables the sub-query
// depends on, and register each as a required DUMP node.
TableDependencyVisitor dependenciesVisitor = TableDependencyVisitor.create();
Query query = SqlTaskNode.parseQuery(subqueryTaskNode.getContent());
dependenciesVisitor.process(query, null);
dependenciesVisitor.getTabDependencies().stream().forEach((table) -> {
// ;
//
// List<TableTupleCreator> tables = SqlTaskNode.dumpNodes.get(table);
// if (tables.size() != 1) {
// throw new IllegalStateException("table:" + table + " relevant tab has more than 1 match");
// }
// tables.get(0).getEntityName();
EntityName firstMatch = dumpNodsContext.accurateMatch(table);
subqueryTaskNode.addRequired(firstMatch, new SqlTaskNode(firstMatch, NodeType.DUMP, this.dumpNodsContext));
});
// Propagate the parsed sub-query's column refs and real entity name onto
// the tuple being visited.
final TableTupleCreator tupleCreator = subqueryTaskNode.parse(true);
tableTuple.setColsRefs(tupleCreator.getColsRefs());
tableTuple.setRealEntityName(tupleCreator.getEntityName());
// tableTuple.setEntityRef(new EntitiyRef(tableCriteria.getName(),
// subqueryTaskNode));
} else {
// No sub-query: try to resolve the criteria name as a known dump table;
// fall back to parsing the raw name when no dump node matches.
// tableTuple.setEntityRef(new EntitiyRef(tableCriteria.getName()));
// List<TableTupleCreator> tabs = null;
EntityName ename = this.dumpNodsContext.nullableMatch(tableCriteria.getName());
if (ename != null) {
tableTuple.setNodetype(NodeType.DUMP);
tableTuple.setRealEntityName(ename);
} else {
tableTuple.setRealEntityName(EntityName.parse(tableCriteria.getName()));
}
// if ((tabs = SqlTaskNode.dumpNodes.get(tableCriteria.getName())) != null) {
// tableTuple.setNodetype(NodeType.DUMP);
//
// if (tabs.size() != 1) {
// throw new IllegalStateException(
// "tabname:" + tableCriteria.getName() + " relevant tab size shall be 1 but " + tabs.size());
// } else {
// tableTuple.setRealEntityName(tabs.get(0).getEntityName());
// }
// } else {
// tableTuple.setRealEntityName(EntityName.parse(tableCriteria.getName()));
// }
}
}
Use of com.qlangtech.tis.sql.parser.tuple.creator.EntityName in the project plugins by qlangtech.
Class HiveTask, method executeSql.
@Override
protected void executeSql(String taskname, String sql) {
    // Execute the rewritten HIVE SQL for this task, record the partition it
    // produced in the shared context, then verify that the expected partition
    // is actually visible in HIVE metadata; throws IllegalStateException if not.
    this.validateDependenciesNode(taskname);
    final Connection conn = this.getTaskContext().getObj();
    final EntityName newCreateTab = EntityName.parse(this.nodeMeta.getExportName());
    // final String newCreatePt = primaryTable.getTabPartition();
    // NOTE(review): return value discarded — presumably called for a side
    // effect (content initialization?); confirm and document, or remove.
    this.getContent();
    List<String> allpts = null;
    try {
        logger.info("\n execute hive task:{} \n{}", taskname, sql);
        HiveDBUtils.execute(conn, sql, joinTaskStatus);
        // Register the partition produced by this join task in the current context.
        TabPartitions dumpPartition = this.getDumpPartition();
        dumpPartition.putPt(newCreateTab, this.rewriteSql.primaryTable);
        allpts = HiveRemoveHistoryDataTask.getHistoryPts(conn, newCreateTab);
    } catch (Exception e) {
        // TODO: a failure here should abort the entire execution chain
        throw new RuntimeException("taskname:" + taskname, e);
    }
    IAliasTable child = null;
    // Verify the freshly written partition exists among the table's partitions.
    if (!allpts.contains(this.rewriteSql.primaryTable.getPt())) {
        // StringBuilder: single-threaded use, no need for StringBuffer's synchronization.
        StringBuilder errInfo = new StringBuilder();
        errInfo.append("\ntable:").append(newCreateTab).append(",").append(IDumpTable.PARTITION_PT)
                .append(":").append(this.rewriteSql.primaryTable)
                .append(" is not exist in exist partition set [")
                .append(Joiner.on(",").join(allpts)).append("]");
        // Fall back to the child (non-sub-query) table's partitions for a richer diagnostic.
        child = this.rewriteSql.primaryTable.getChild();
        if (child != null && !child.isSubQueryTable()) {
            try {
                allpts = HiveRemoveHistoryDataTask.getHistoryPts(conn, child.getTable());
            } catch (Exception e) {
                throw new RuntimeException(child.getTable().getFullName(), e);
            }
            errInfo.append("\n\t child table:").append(child.getTable()).append(",")
                    .append(IDumpTable.PARTITION_PT).append(":").append(this.rewriteSql.primaryTable)
                    .append(" is not exist in exist partition set [")
                    .append(Joiner.on(",").join(allpts)).append("]");
        }
        throw new IllegalStateException(errInfo.toString());
    }
}
Use of com.qlangtech.tis.sql.parser.tuple.creator.EntityName in the project plugins by qlangtech.
Class HiveRemoveHistoryDataTask, method dropHistoryHiveTable.
/**
 * Drop history partitions of a HIVE table, keeping at most
 * {@code maxPartitionSave} of the newest partitions.
 *
 * @param dumpTable        table whose history partitions are pruned
 * @param conn             open HIVE JDBC connection
 * @param filter           filter applied when listing existing partitions
 * @param maxPartitionSave number of newest partitions to retain (must be >= 1)
 * @return the partitions that were dropped (empty if the table does not exist)
 * @throws IllegalArgumentException if maxPartitionSave is null or less than 1
 * @throws IllegalStateException    if the partition column name {@code pt} is blank
 */
public List<FSHistoryFileUtils.PathInfo> dropHistoryHiveTable(EntityName dumpTable, Connection conn, PartitionFilter filter, Integer maxPartitionSave) {
    // Explicit null check: unboxing a null Integer below would otherwise fail
    // with an uninformative NullPointerException.
    if (maxPartitionSave == null) {
        throw new IllegalArgumentException("param maxPartitionSave can not be null");
    }
    if (maxPartitionSave < 1) {
        throw new IllegalArgumentException("param maxPartitionSave can not small than 1");
    }
    final EntityName table = dumpTable;
    if (StringUtils.isBlank(pt)) {
        throw new IllegalStateException("pt name shall be set");
    }
    String existTimestamp = null;
    FSHistoryFileUtils.PathInfo pathInfo = null;
    List<FSHistoryFileUtils.PathInfo> deletePts = Lists.newArrayList();
    try {
        // Nothing to prune if the table does not exist in HIVE.
        if (!BindHiveTableTool.HiveTableBuilder.isTableExists(this.mrEngine, conn, table)) {
            logger.info(table + " is not exist");
            return Collections.emptyList();
        }
        List<String> ptList = getHistoryPts(conn, filter, table);
        // Walk partitions newest-first; once more than maxPartitionSave have
        // been seen, drop every older partition.
        int count = 0;
        for (int i = ptList.size() - 1; i >= 0; i--) {
            if ((++count) > maxPartitionSave) {
                existTimestamp = ptList.get(i);
                pathInfo = new FSHistoryFileUtils.PathInfo();
                pathInfo.setTimeStamp(Long.parseLong(existTimestamp));
                pathInfo.setPathName(existTimestamp);
                deletePts.add(pathInfo);
                String alterSql = "alter table " + table + " drop partition ( " + pt + " = '" + existTimestamp + "' )";
                try {
                    // Best-effort: a failed drop is logged but does not abort the prune loop.
                    HiveDBUtils.execute(conn, alterSql);
                } catch (Throwable e) {
                    logger.error("alterSql:" + alterSql, e);
                }
                logger.info("history table:" + table + ", partition:" + pt + "='" + existTimestamp + "', have been removed");
            }
        }
        logger.info("maxPartitionSave:" + maxPartitionSave + ",table:" + table.getFullName()
                + " exist partitions:" + String.join(",", ptList)
                + " dropped partitions:" + deletePts.stream().map((p) -> p.getPathName()).collect(Collectors.joining(",")));
    } catch (Exception e) {
        // Preserve context: say which table the prune failed on.
        throw new RuntimeException("drop history of table:" + table.getFullName(), e);
    }
    return deletePts;
}
Use of com.qlangtech.tis.sql.parser.tuple.creator.EntityName in the project plugins by qlangtech.
Class JoinHiveTask, method executeSql.
@Override
protected void executeSql(String taskName, String rewritedSql) {
    // Clean up stale history state first: surplus partitions are dropped and
    // the table is recreated if its definition changed.
    processJoinTask(rewritedSql);
    // Wrap the rewritten SELECT into an INSERT targeting the export table,
    // then delegate execution to the parent implementation.
    final EntityName target = EntityName.parse(this.nodeMeta.getExportName());
    final Object[] args = { target.getFullName(), rewritedSql };
    super.executeSql(taskName, SQL_INSERT_TABLE.format(args));
}
Aggregations