Example 6 with EntityName

Use of com.qlangtech.tis.sql.parser.tuple.creator.EntityName in project tis by qlangtech.

In the class FlatTableRelation, the method buildQueryHeaderByTailerInfoMethodName.

private String buildQueryHeaderByTailerInfoMethodName() {
    final EntityName tailerEntity = this.getTailerEntity();
    final EntityName headEntity = this.getHeaderEntity();
    return "query" + headEntity.javaPropTableName() + "By" + tailerEntity.javaPropTableName();
}
Also used : EntityName(com.qlangtech.tis.sql.parser.tuple.creator.EntityName)
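
For orientation, a minimal sketch of the naming convention this method produces. The table names are hypothetical, and it is an assumption (suggested by the other snippets on this page) that javaPropTableName() camel-cases the underlying table name:

// Hypothetical illustration; the table names and the exact camel-casing
// behaviour of javaPropTableName() are assumptions.
EntityName head = EntityName.parse("totalpayinfo");
EntityName tail = EntityName.parse("order_bill");
String methodName = "query" + head.javaPropTableName() + "By" + tail.javaPropTableName();
// expected to yield something like "queryTotalpayinfoByOrderBill"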

Example 7 with EntityName

Use of com.qlangtech.tis.sql.parser.tuple.creator.EntityName in project tis by qlangtech.

In the class TabCriteriaEntityRecognizeVisitor, the method visit.

@Override
public void visit(TableTupleCreator tableTuple) {
    TabCriteria tableCriteria = tabCriteria;
    // handle the JOIN clause logic
    if (!tableCriteria.isPrimary()) {
        JoinOn joinOn = tableCriteria.getJoinOn();
        ComparisonExpression compare = null;
        LogicalBinaryExpression logic = null;
        if (joinOn.getExpression() instanceof ComparisonExpression) {
            // (tp.totalpay_id = o.totalpay_id)
            compare = (ComparisonExpression) joinOn.getExpression();
        } else if (joinOn.getExpression() instanceof LogicalBinaryExpression) {
            // ((tp.card_id = cc.id) AND (tp.entity_id = cc.entity_id))
            logic = (LogicalBinaryExpression) joinOn.getExpression();
        }
    }
    if (this.subQuery != null) {
        // the criteria contains an embedded subquery, as in the totalpay/order_customers.txt file
        SqlTaskNode subqueryTaskNode = new SqlTaskNode(EntityName.parse(tableCriteria.getName()), NodeType.JOINER_SQL, this.dumpNodsContext);
        subqueryTaskNode.setContent(SqlFormatter.formatSql(this.subQuery.getQuery().getQueryBody(), Optional.empty()));
        TableDependencyVisitor dependenciesVisitor = TableDependencyVisitor.create();
        Query query = SqlTaskNode.parseQuery(subqueryTaskNode.getContent());
        dependenciesVisitor.process(query, null);
        dependenciesVisitor.getTabDependencies().stream().forEach((table) -> {
            EntityName firstMatch = dumpNodsContext.accurateMatch(table);
            subqueryTaskNode.addRequired(firstMatch, new SqlTaskNode(firstMatch, NodeType.DUMP, this.dumpNodsContext));
        });
        final TableTupleCreator tupleCreator = subqueryTaskNode.parse(true);
        tableTuple.setColsRefs(tupleCreator.getColsRefs());
        tableTuple.setRealEntityName(tupleCreator.getEntityName());
    } else {
        EntityName ename = this.dumpNodsContext.nullableMatch(tableCriteria.getName());
        if (ename != null) {
            tableTuple.setNodetype(NodeType.DUMP);
            tableTuple.setRealEntityName(ename);
        } else {
            tableTuple.setRealEntityName(EntityName.parse(tableCriteria.getName()));
        }
    }
}
Also used : LogicalBinaryExpression(com.facebook.presto.sql.tree.LogicalBinaryExpression) SqlTaskNode(com.qlangtech.tis.sql.parser.SqlTaskNode) TabCriteria(com.qlangtech.tis.sql.parser.visitor.TableReferenceVisitor.TabCriteria) ComparisonExpression(com.facebook.presto.sql.tree.ComparisonExpression) Query(com.facebook.presto.sql.tree.Query) EntityName(com.qlangtech.tis.sql.parser.tuple.creator.EntityName) TableDependencyVisitor(com.qlangtech.tis.sql.parser.visitor.TableDependencyVisitor) JoinOn(com.facebook.presto.sql.tree.JoinOn)
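
The dependency-collection step in the middle of this method can be exercised on its own. A minimal sketch, reusing SqlTaskNode.parseQuery, TableDependencyVisitor and EntityName.parse as they appear above; the SQL text is hypothetical, and it is an assumption (suggested by the surrounding code) that getTabDependencies() yields table-name strings:

// Hypothetical standalone use of the dependency collection shown above.
String subQuerySql = "SELECT o.totalpay_id, o.customer_id FROM order_customers o";
Query query = SqlTaskNode.parseQuery(subQuerySql);
TableDependencyVisitor visitor = TableDependencyVisitor.create();
visitor.process(query, null);
visitor.getTabDependencies().forEach((table) -> {
    // the visitor above resolves each dependency against the dump-node context;
    // here it is simply parsed into an EntityName for illustration
    EntityName entity = EntityName.parse(table);
    System.out.println(entity);
});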

Example 8 with EntityName

Use of com.qlangtech.tis.sql.parser.tuple.creator.EntityName in project plugins by qlangtech.

In the class HiveTask, the method executeSql.

@Override
protected void executeSql(String taskname, String sql) {
    this.validateDependenciesNode(taskname);
    final Connection conn = this.getTaskContext().getObj();
    final EntityName newCreateTab = EntityName.parse(this.nodeMeta.getExportName());
    this.getContent();
    List<String> allpts = null;
    try {
        logger.info("\n execute hive task:{} \n{}", taskname, sql);
        HiveDBUtils.execute(conn, sql, joinTaskStatus);
        // register the partition of the current join task in the current context
        TabPartitions dumpPartition = this.getDumpPartition();
        dumpPartition.putPt(newCreateTab, this.rewriteSql.primaryTable);
        allpts = HiveRemoveHistoryDataTask.getHistoryPts(conn, newCreateTab);
    } catch (Exception e) {
        // TODO: once an exception occurs, the whole pipeline execution should be stopped
        throw new RuntimeException("taskname:" + taskname, e);
    }
    IAliasTable child = null;
    // verify that the latest partition has actually been created
    if (!allpts.contains(this.rewriteSql.primaryTable.getPt())) {
        StringBuilder errInfo = new StringBuilder();
        errInfo.append("\ntable:" + newCreateTab + "," + IDumpTable.PARTITION_PT + ":" + this.rewriteSql.primaryTable + " does not exist in the existing partition set [" + Joiner.on(",").join(allpts) + "]");
        child = this.rewriteSql.primaryTable.getChild();
        if (child != null && !child.isSubQueryTable()) {
            try {
                allpts = HiveRemoveHistoryDataTask.getHistoryPts(conn, child.getTable());
            } catch (Exception e) {
                throw new RuntimeException(child.getTable().getFullName(), e);
            }
            errInfo.append("\n\t child table:").append(child.getTable()).append(",").append(IDumpTable.PARTITION_PT).append(":").append(this.rewriteSql.primaryTable).append(" does not exist in the existing partition set [").append(Joiner.on(",").join(allpts)).append("]");
        }
        throw new IllegalStateException(errInfo.toString());
    }
}
Also used : EntityName(com.qlangtech.tis.sql.parser.tuple.creator.EntityName) Connection(java.sql.Connection) IAliasTable(com.qlangtech.tis.sql.parser.IAliasTable) TabPartitions(com.qlangtech.tis.sql.parser.TabPartitions)
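
The partition check at the end of the method is easy to illustrate in isolation. A minimal sketch with hypothetical partition values, mirroring the validation above (only java.util is needed):

// Hypothetical partition values; mirrors the validation above.
List<String> allpts = Arrays.asList("20220601121212", "20220602121212");
String expectedPt = "20220603121212";
if (!allpts.contains(expectedPt)) {
    throw new IllegalStateException("pt:" + expectedPt
            + " does not exist in the existing partition set [" + String.join(",", allpts) + "]");
}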

Example 9 with EntityName

Use of com.qlangtech.tis.sql.parser.tuple.creator.EntityName in project plugins by qlangtech.

In the class HiveRemoveHistoryDataTask, the method dropHistoryHiveTable.

/**
 * Remove historical table partitions from Hive
 */
public List<FSHistoryFileUtils.PathInfo> dropHistoryHiveTable(EntityName dumpTable, Connection conn, PartitionFilter filter, Integer maxPartitionSave) {
    if (maxPartitionSave < 1) {
        throw new IllegalArgumentException("param maxPartitionSave can not be smaller than 1");
    }
    final EntityName table = dumpTable;
    if (StringUtils.isBlank(pt)) {
        throw new IllegalStateException("pt name shall be set");
    }
    String existTimestamp = null;
    FSHistoryFileUtils.PathInfo pathInfo = null;
    List<FSHistoryFileUtils.PathInfo> deletePts = Lists.newArrayList();
    try {
        // check whether the table exists
        if (!BindHiveTableTool.HiveTableBuilder.isTableExists(this.mrEngine, conn, table)) {
            logger.info(table + " does not exist");
            return Collections.emptyList();
        }
        List<String> ptList = getHistoryPts(conn, filter, table);
        int count = 0;
        for (int i = ptList.size() - 1; i >= 0; i--) {
            if ((++count) > maxPartitionSave) {
                existTimestamp = ptList.get(i);
                pathInfo = new FSHistoryFileUtils.PathInfo();
                pathInfo.setTimeStamp(Long.parseLong(existTimestamp));
                pathInfo.setPathName(existTimestamp);
                deletePts.add(pathInfo);
                String alterSql = "alter table " + table + " drop partition (  " + pt + " = '" + existTimestamp + "' )";
                try {
                    HiveDBUtils.execute(conn, alterSql);
                } catch (Throwable e) {
                    logger.error("alterSql:" + alterSql, e);
                }
                logger.info("history table:" + table + ", partition:" + pt + "='" + existTimestamp + "', have been removed");
            }
        }
        logger.info("maxPartitionSave:" + maxPartitionSave + ",table:" + table.getFullName() + " exist partitions:" + ptList.stream().collect(Collectors.joining(",")) + " dropped partitions:" + deletePts.stream().map((p) -> p.getPathName()).collect(Collectors.joining(",")));
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
    return deletePts;
}
Also used : IDumpTable(com.qlangtech.tis.fullbuild.indexbuild.IDumpTable) StringUtils(org.apache.commons.lang.StringUtils) java.util(java.util) Logger(org.slf4j.Logger) Connection(java.sql.Connection) MREngine(com.qlangtech.tis.plugin.datax.MREngine) EntityName(com.qlangtech.tis.sql.parser.tuple.creator.EntityName) LoggerFactory(org.slf4j.LoggerFactory) Collectors(java.util.stream.Collectors) ITableDumpConstant(com.qlangtech.tis.order.dump.task.ITableDumpConstant) Lists(com.google.common.collect.Lists) Matcher(java.util.regex.Matcher) Pattern(java.util.regex.Pattern) ITISFileSystem(com.qlangtech.tis.fs.ITISFileSystem) FSHistoryFileUtils(com.qlangtech.tis.fs.FSHistoryFileUtils)
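
The retention rule itself, keep the newest maxPartitionSave partitions and drop the rest, can be shown without a Hive connection. A minimal sketch with hypothetical pt values, mirroring the loop above:

// Hypothetical pt list in ascending order; keep the newest 2, drop the rest.
List<String> ptList = Arrays.asList("20220601", "20220602", "20220603", "20220604");
int maxPartitionSave = 2;
List<String> toDrop = new ArrayList<>();
int count = 0;
for (int i = ptList.size() - 1; i >= 0; i--) {
    if ((++count) > maxPartitionSave) {
        // collected newest-to-oldest: "20220602", then "20220601"
        toDrop.add(ptList.get(i));
    }
}
// for each collected entry a statement like the one above would be issued:
// alter table <table> drop partition ( pt = '20220602' )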

Example 10 with EntityName

Use of com.qlangtech.tis.sql.parser.tuple.creator.EntityName in project plugins by qlangtech.

In the class JoinHiveTask, the method executeSql.

@Override
protected void executeSql(String taskName, String rewritedSql) {
    // handle historical tables: redundant partitions must be dropped, and if the table definition has changed it must be dropped and recreated
    processJoinTask(rewritedSql);
    final EntityName newCreateTab = EntityName.parse(this.nodeMeta.getExportName());
    final String insertSql = SQL_INSERT_TABLE.format(new Object[] { newCreateTab.getFullName(), rewritedSql });
    super.executeSql(taskName, insertSql);
}
Also used : EntityName(com.qlangtech.tis.sql.parser.tuple.creator.EntityName)
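
SQL_INSERT_TABLE is used here as a java.text.MessageFormat, but its actual pattern is not shown on this page; the pattern below is a hypothetical stand-in to show how the rewritten SQL gets wrapped into an insert statement:

// Hypothetical stand-in for SQL_INSERT_TABLE; the real pattern may differ.
MessageFormat SQL_INSERT_TABLE = new MessageFormat("INSERT OVERWRITE TABLE {0}\n{1}");
String fullName = "tis.totalpay_summary";            // hypothetical EntityName.getFullName() result
String rewritedSql = "SELECT * FROM totalpayinfo t"; // hypothetical rewritten join SQL
String insertSql = SQL_INSERT_TABLE.format(new Object[] { fullName, rewritedSql });
// insertSql -> "INSERT OVERWRITE TABLE tis.totalpay_summary\nSELECT * FROM totalpayinfo t"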

Aggregations

EntityName (com.qlangtech.tis.sql.parser.tuple.creator.EntityName) 17
DependencyNode (com.qlangtech.tis.sql.parser.meta.DependencyNode) 6
Collectors (java.util.stream.Collectors) 5
StringUtils (org.apache.commons.lang.StringUtils) 5
Connection (java.sql.Connection) 4
ITISFileSystem (com.qlangtech.tis.fs.ITISFileSystem) 3
TisGroupBy (com.qlangtech.tis.sql.parser.TisGroupBy) 3
FuncFormat (com.qlangtech.tis.sql.parser.visitor.FuncFormat) 3
Lists (com.google.common.collect.Lists) 2
IDumpTable (com.qlangtech.tis.fullbuild.indexbuild.IDumpTable) 2
LinkKeys (com.qlangtech.tis.sql.parser.er.LinkKeys) 2
PrimaryTableMeta (com.qlangtech.tis.sql.parser.er.PrimaryTableMeta) 2
TabCardinality (com.qlangtech.tis.sql.parser.er.TabCardinality) 2
TableRelation (com.qlangtech.tis.sql.parser.er.TableRelation) 2
FlatTableRelation (com.qlangtech.tis.sql.parser.stream.generate.FlatTableRelation) 2
java.util (java.util) 2
List (java.util.List) 2
Optional (java.util.Optional) 2
Set (java.util.Set) 2
Pattern (java.util.regex.Pattern) 2