Example 1 with HiveColumn

use of com.qlangtech.tis.hive.HiveColumn in project plugins by qlangtech.

the class BindHiveTableTool method getColumns.

@SuppressWarnings("all")
public static List<HiveColumn> getColumns(ITISFileSystem fs, EntityName hiveTable, String timestamp) throws IOException {
    String hivePath = hiveTable.getNameWithPath();
    InputStream input = null;
    List<HiveColumn> cols = new ArrayList<>();
    try {
        input = fs.open(fs.getPath(fs.getRootDir() + "/" + hivePath + "/all/" + timestamp + "/" + ColumnMetaData.KEY_COLS_METADATA));
        // input = fileSystem.open(path);
        String content = IOUtils.toString(input, TisUTF8.getName());
        JSONArray array = (JSONArray) JSON.parse(content);
        for (Object anArray : array) {
            JSONObject o = (JSONObject) anArray;
            HiveColumn col = new HiveColumn();
            col.setName(o.getString("key"));
            col.setIndex(o.getIntValue("index"));
            col.setType(getHiveType(o.getIntValue("type")).name());
            cols.add(col);
        }
    } finally {
        IOUtils.closeQuietly(input);
    }
    return cols;
}
Also used : JSONObject(com.alibaba.fastjson.JSONObject) InputStream(java.io.InputStream) ArrayList(java.util.ArrayList) JSONArray(com.alibaba.fastjson.JSONArray) HiveColumn(com.qlangtech.tis.hive.HiveColumn)
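
For reference, a minimal sketch (not taken from the project) of the column-metadata layout that getColumns expects in the ColumnMetaData.KEY_COLS_METADATA file: a JSON array whose entries carry a key, an index and a numeric type code. The sample keys and codes below are hypothetical; in the real code the numeric code is resolved to a Hive type name by getHiveType.

// Hypothetical metadata content, parsed with the same fastjson calls as in getColumns
String content = "[{\"key\":\"user_id\",\"index\":0,\"type\":-5},{\"key\":\"user_name\",\"index\":1,\"type\":12}]";
JSONArray array = (JSONArray) JSON.parse(content);
for (Object anArray : array) {
    JSONObject o = (JSONObject) anArray;
    HiveColumn col = new HiveColumn();
    col.setName(o.getString("key"));
    col.setIndex(o.getIntValue("index"));
    // getHiveType(o.getIntValue("type")) does the real mapping; a type name is hard-coded here
    col.setType("STRING");
}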

Example 2 with HiveColumn

use of com.qlangtech.tis.hive.HiveColumn in project plugins by qlangtech.

the class UnionHiveTask method getParsersString.

private String getParsersString() {
    StringBuilder sb = new StringBuilder();
    int parserSize = parserList.size();
    int parserCnt = 0;
    for (HiveInsertFromSelectParser parser : parserList) {
        Map<String, HiveColumn> columnMap = parser.getColsMap();
        sb.append("SELECT ");
        int columnSize = columnSet.size();
        int columnCnt = 0;
        for (String column : columnSet) {
            if (columnMap.containsKey(column)) {
                HiveColumn hiveColumn = columnMap.get(column);
                if (hiveColumn.hasAliasName()) {
                    sb.append(hiveColumn.getRawName()).append(" AS ").append(column);
                } else if (hiveColumn.hasDefaultValue()) {
                    sb.append(hiveColumn.getDefalutValue()).append(" AS ").append(column);
                } else {
                    sb.append(hiveColumn.getName());
                }
            } else {
                sb.append("'' AS ").append(column);
            }
            if (++columnCnt < columnSize) {
                sb.append(", ");
            }
        }
        sb.append(" FROM `").append(parser.getSourceTableName()).append("`");
        if (parser.getWhere() != null) {
            sb.append(" where ").append(getConditionString(parser.getWhere().getChild(0)));
        }
        if (++parserCnt < parserSize) {
            sb.append("\nUNION ALL\n");
        }
    }
    return sb.toString();
}
Also used : HiveColumn(com.qlangtech.tis.hive.HiveColumn) HiveInsertFromSelectParser(com.qlangtech.tis.hive.HiveInsertFromSelectParser)
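
To make the generated SQL easier to picture, here is a hypothetical result of getParsersString() for two parsers whose combined column set is (id, name), where table_a has no name column and table_b maps it from an aliased raw column. The table and column names are invented, and any WHERE text would come from getConditionString.

// Hypothetical output, not taken from the project
String unionSql =
    "SELECT id, '' AS name FROM `table_a`" +
    "\nUNION ALL\n" +
    "SELECT id, user_name AS name FROM `table_b`";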

Example 3 with HiveColumn

use of com.qlangtech.tis.hive.HiveColumn in project plugins by qlangtech.

the class BasicEngineJob method prepare.

public void prepare() {
    super.prepare();
    this.colsExcludePartitionCols = getCols();
    int[] appendStartIndex = new int[] { colsExcludePartitionCols.size() };
    List<HiveColumn> cols = Lists.newArrayList(colsExcludePartitionCols);
    IDumpTable.preservedPsCols.forEach((c) -> {
        HiveColumn hiveCol = new HiveColumn();
        hiveCol.setName(c);
        hiveCol.setType(SupportHiveDataType.STRING.name());
        hiveCol.setIndex(appendStartIndex[0]++);
        cols.add(hiveCol);
    });
    initializeHiveTable(cols);
}
Also used : HiveColumn(com.qlangtech.tis.hive.HiveColumn)
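
Viewed in isolation, the loop above appends the preserved partition columns after the data columns as STRING, continuing the index sequence. A stand-alone sketch, with "pt" and "pmod" used only as stand-ins for whatever IDumpTable.preservedPsCols actually contains:

List<HiveColumn> cols = Lists.newArrayList(dataCols);    // dataCols: hypothetical result of getCols()
int[] appendStartIndex = new int[] { cols.size() };      // partition columns index after the data columns
for (String c : java.util.Arrays.asList("pt", "pmod")) { // stand-ins for IDumpTable.preservedPsCols
    HiveColumn hiveCol = new HiveColumn();
    hiveCol.setName(c);
    hiveCol.setType(SupportHiveDataType.STRING.name());  // preserved partition columns are typed STRING
    hiveCol.setIndex(appendStartIndex[0]++);
    cols.add(hiveCol);
}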

Example 4 with HiveColumn

use of com.qlangtech.tis.hive.HiveColumn in project plugins by qlangtech.

the class BasicEngineJob method getCols.

private List<HiveColumn> getCols() {
    // try {
    // (List<Configuration>) TisDataXHiveWriter.jobColumnsField.get(this);
    List<Configuration> cols = this.columns;
    AtomicInteger index = new AtomicInteger();
    return cols.stream().map((c) -> {
        HiveColumn hivCol = new HiveColumn();
        SupportHiveDataType columnType = SupportHiveDataType.valueOf(StringUtils.upperCase(c.getString(HdfsColMeta.KEY_TYPE)));
        String name = StringUtils.remove(c.getString(HdfsColMeta.KEY_NAME), "`");
        if (StringUtils.isBlank(name)) {
            throw new IllegalStateException("col name can not be blank");
        }
        hivCol.setName(name);
        hivCol.setType(columnType.name());
        hivCol.setIndex(index.getAndIncrement());
        return hivCol;
    }).collect(Collectors.toList());
// } catch (IllegalAccessException e) {
// throw new RuntimeException(e);
// }
}
Also used : IDumpTable(com.qlangtech.tis.fullbuild.indexbuild.IDumpTable) StringUtils(org.apache.commons.lang.StringUtils) Connection(java.sql.Connection) HdfsFileSystemFactory(com.qlangtech.tis.hdfs.impl.HdfsFileSystemFactory) LoggerFactory(org.slf4j.LoggerFactory) Callable(java.util.concurrent.Callable) ITaskContext(com.qlangtech.tis.fs.ITaskContext) Configuration(com.alibaba.datax.common.util.Configuration) HdfsWriterErrorCode(com.alibaba.datax.plugin.writer.hdfswriter.HdfsWriterErrorCode) SQLException(java.sql.SQLException) Lists(com.google.common.collect.Lists) HdfsPath(com.qlangtech.tis.hdfs.impl.HdfsPath) CollectionUtils(org.apache.commons.collections.CollectionUtils) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) Path(org.apache.hadoop.fs.Path) BindHiveTableTool(com.qlangtech.tis.dump.hive.BindHiveTableTool) ITISFileSystem(com.qlangtech.tis.fs.ITISFileSystem) Logger(org.slf4j.Logger) HdfsFileType(com.qlangtech.tis.hive.HdfsFileType) EntityName(com.qlangtech.tis.sql.parser.tuple.creator.EntityName) HiveColumn(com.qlangtech.tis.hive.HiveColumn) ExceptionUtils(org.apache.commons.lang.exception.ExceptionUtils) IOException(java.io.IOException) JoinHiveTask(com.qlangtech.tis.fullbuild.taskflow.hive.JoinHiveTask) Collectors(java.util.stream.Collectors) Objects(java.util.Objects) HdfsColMeta(com.alibaba.datax.plugin.writer.hdfswriter.HdfsColMeta) List(java.util.List) SupportHiveDataType(com.alibaba.datax.plugin.writer.hdfswriter.SupportHiveDataType) Collections(java.util.Collections) HdfsFormat(com.qlangtech.tis.hive.HdfsFormat)
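
As a hedged illustration of the mapping above, a single DataX column Configuration fed through the same steps; the literal "name" and "type" keys are assumptions standing in for HdfsColMeta.KEY_NAME and HdfsColMeta.KEY_TYPE, and the sample column is invented.

// Hypothetical column entry; key strings assumed for illustration
Configuration c = Configuration.from("{\"name\":\"`user_id`\",\"type\":\"bigint\"}");
SupportHiveDataType columnType = SupportHiveDataType.valueOf(StringUtils.upperCase(c.getString("type"))); // BIGINT
String name = StringUtils.remove(c.getString("name"), "`"); // back-ticks stripped -> user_id
HiveColumn hiveCol = new HiveColumn();
hiveCol.setName(name);
hiveCol.setType(columnType.name());
hiveCol.setIndex(0); // assigned from the AtomicInteger in the real code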

Aggregations

HiveColumn (com.qlangtech.tis.hive.HiveColumn) 4
Configuration (com.alibaba.datax.common.util.Configuration) 1
HdfsColMeta (com.alibaba.datax.plugin.writer.hdfswriter.HdfsColMeta) 1
HdfsWriterErrorCode (com.alibaba.datax.plugin.writer.hdfswriter.HdfsWriterErrorCode) 1
SupportHiveDataType (com.alibaba.datax.plugin.writer.hdfswriter.SupportHiveDataType) 1
JSONArray (com.alibaba.fastjson.JSONArray) 1
JSONObject (com.alibaba.fastjson.JSONObject) 1
Lists (com.google.common.collect.Lists) 1
BindHiveTableTool (com.qlangtech.tis.dump.hive.BindHiveTableTool) 1
ITISFileSystem (com.qlangtech.tis.fs.ITISFileSystem) 1
ITaskContext (com.qlangtech.tis.fs.ITaskContext) 1
IDumpTable (com.qlangtech.tis.fullbuild.indexbuild.IDumpTable) 1
JoinHiveTask (com.qlangtech.tis.fullbuild.taskflow.hive.JoinHiveTask) 1
HdfsFileSystemFactory (com.qlangtech.tis.hdfs.impl.HdfsFileSystemFactory) 1
HdfsPath (com.qlangtech.tis.hdfs.impl.HdfsPath) 1
HdfsFileType (com.qlangtech.tis.hive.HdfsFileType) 1
HdfsFormat (com.qlangtech.tis.hive.HdfsFormat) 1
HiveInsertFromSelectParser (com.qlangtech.tis.hive.HiveInsertFromSelectParser) 1
EntityName (com.qlangtech.tis.sql.parser.tuple.creator.EntityName) 1
IOException (java.io.IOException) 1