
Example 1 with SupportHiveDataType

Use of com.alibaba.datax.plugin.writer.hdfswriter.SupportHiveDataType in project plugins by qlangtech.

The getCols method of the class BasicEngineJob:

private List<HiveColumn> getCols() {
    // Column definitions supplied by the DataX job configuration.
    List<Configuration> cols = this.columns;
    AtomicInteger index = new AtomicInteger();
    return cols.stream().map((c) -> {
        HiveColumn hivCol = new HiveColumn();
        // Resolve the declared column type against the SupportHiveDataType enum.
        SupportHiveDataType columnType = SupportHiveDataType.valueOf(StringUtils.upperCase(c.getString(HdfsColMeta.KEY_TYPE)));
        // Strip backquotes from the column name; a blank name is a configuration error.
        String name = StringUtils.remove(c.getString(HdfsColMeta.KEY_NAME), "`");
        if (StringUtils.isBlank(name)) {
            throw new IllegalStateException("col name can not be blank");
        }
        hivCol.setName(name);
        hivCol.setType(columnType.name());
        hivCol.setIndex(index.getAndIncrement());
        return hivCol;
    }).collect(Collectors.toList());
}
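
For context, the snippet below is a minimal, self-contained sketch of the same name/type resolution pattern. The reduced HiveType enum, the Col class, and the plain Map column entries are hypothetical stand-ins for SupportHiveDataType, HiveColumn, and the DataX column Configurations; only the mapping steps (upper-casing the declared type, stripping backquotes, rejecting blank names, assigning a running index) mirror getCols above.

import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;

public class GetColsSketch {

    // Hypothetical stand-in for SupportHiveDataType; the real enum lives in
    // com.alibaba.datax.plugin.writer.hdfswriter and has more members.
    enum HiveType { INT, BIGINT, STRING, DOUBLE }

    // Hypothetical stand-in for com.qlangtech.tis.hive.HiveColumn.
    static final class Col {
        final String name;
        final String type;
        final int index;
        Col(String name, String type, int index) {
            this.name = name;
            this.type = type;
            this.index = index;
        }
        @Override
        public String toString() {
            return index + ":" + name + " " + type;
        }
    }

    public static void main(String[] args) {
        // Stand-ins for the per-column Configurations read by getCols().
        List<Map<String, String>> cols = List.of(
                Map.of("name", "`user_id`", "type", "bigint"),
                Map.of("name", "user_name", "type", "string"));

        AtomicInteger index = new AtomicInteger();
        List<Col> hiveCols = cols.stream().map(c -> {
            // Same steps as getCols(): upper-case the type, strip backquotes, reject blank names.
            HiveType type = HiveType.valueOf(c.get("type").toUpperCase(Locale.ROOT));
            String name = c.get("name").replace("`", "");
            if (name.isBlank()) {
                throw new IllegalStateException("col name can not be blank");
            }
            return new Col(name, type.name(), index.getAndIncrement());
        }).collect(Collectors.toList());

        // Prints: 0:user_id BIGINT and 1:user_name STRING
        hiveCols.forEach(System.out::println);
    }
}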
Also used : IDumpTable (com.qlangtech.tis.fullbuild.indexbuild.IDumpTable), StringUtils (org.apache.commons.lang.StringUtils), Connection (java.sql.Connection), HdfsFileSystemFactory (com.qlangtech.tis.hdfs.impl.HdfsFileSystemFactory), LoggerFactory (org.slf4j.LoggerFactory), Callable (java.util.concurrent.Callable), ITaskContext (com.qlangtech.tis.fs.ITaskContext), Configuration (com.alibaba.datax.common.util.Configuration), HdfsWriterErrorCode (com.alibaba.datax.plugin.writer.hdfswriter.HdfsWriterErrorCode), SQLException (java.sql.SQLException), Lists (com.google.common.collect.Lists), HdfsPath (com.qlangtech.tis.hdfs.impl.HdfsPath), CollectionUtils (org.apache.commons.collections.CollectionUtils), AtomicInteger (java.util.concurrent.atomic.AtomicInteger), Path (org.apache.hadoop.fs.Path), BindHiveTableTool (com.qlangtech.tis.dump.hive.BindHiveTableTool), ITISFileSystem (com.qlangtech.tis.fs.ITISFileSystem), Logger (org.slf4j.Logger), HdfsFileType (com.qlangtech.tis.hive.HdfsFileType), EntityName (com.qlangtech.tis.sql.parser.tuple.creator.EntityName), HiveColumn (com.qlangtech.tis.hive.HiveColumn), ExceptionUtils (org.apache.commons.lang.exception.ExceptionUtils), IOException (java.io.IOException), JoinHiveTask (com.qlangtech.tis.fullbuild.taskflow.hive.JoinHiveTask), Collectors (java.util.stream.Collectors), Objects (java.util.Objects), HdfsColMeta (com.alibaba.datax.plugin.writer.hdfswriter.HdfsColMeta), List (java.util.List), SupportHiveDataType (com.alibaba.datax.plugin.writer.hdfswriter.SupportHiveDataType), Collections (java.util.Collections), HdfsFormat (com.qlangtech.tis.hive.HdfsFormat)

Aggregations

Each of the following types appears in 1 example: Configuration (com.alibaba.datax.common.util.Configuration), HdfsColMeta (com.alibaba.datax.plugin.writer.hdfswriter.HdfsColMeta), HdfsWriterErrorCode (com.alibaba.datax.plugin.writer.hdfswriter.HdfsWriterErrorCode), SupportHiveDataType (com.alibaba.datax.plugin.writer.hdfswriter.SupportHiveDataType), Lists (com.google.common.collect.Lists), BindHiveTableTool (com.qlangtech.tis.dump.hive.BindHiveTableTool), ITISFileSystem (com.qlangtech.tis.fs.ITISFileSystem), ITaskContext (com.qlangtech.tis.fs.ITaskContext), IDumpTable (com.qlangtech.tis.fullbuild.indexbuild.IDumpTable), JoinHiveTask (com.qlangtech.tis.fullbuild.taskflow.hive.JoinHiveTask), HdfsFileSystemFactory (com.qlangtech.tis.hdfs.impl.HdfsFileSystemFactory), HdfsPath (com.qlangtech.tis.hdfs.impl.HdfsPath), HdfsFileType (com.qlangtech.tis.hive.HdfsFileType), HdfsFormat (com.qlangtech.tis.hive.HdfsFormat), HiveColumn (com.qlangtech.tis.hive.HiveColumn), EntityName (com.qlangtech.tis.sql.parser.tuple.creator.EntityName), IOException (java.io.IOException), Connection (java.sql.Connection), SQLException (java.sql.SQLException), Collections (java.util.Collections)