use of com.qlangtech.tis.hive.HiveColumn in project plugins by qlangtech.
the class BindHiveTableTool method getColumns.
@SuppressWarnings("all")
public static List<HiveColumn> getColumns(ITISFileSystem fs, EntityName hiveTable, String timestamp) throws IOException {
    String hivePath = hiveTable.getNameWithPath();
    InputStream input = null;
    List<HiveColumn> cols = new ArrayList<>();
    try {
        // read the column-metadata file written for this dump timestamp
        input = fs.open(fs.getPath(fs.getRootDir() + "/" + hivePath + "/all/" + timestamp + "/" + ColumnMetaData.KEY_COLS_METADATA));
        // input = fileSystem.open(path);
        String content = IOUtils.toString(input, TisUTF8.getName());
        JSONArray array = (JSONArray) JSON.parse(content);
        for (Object anArray : array) {
            JSONObject o = (JSONObject) anArray;
            HiveColumn col = new HiveColumn();
            col.setName(o.getString("key"));
            col.setIndex(o.getIntValue("index"));
            col.setType(getHiveType(o.getIntValue("type")).name());
            cols.add(col);
        }
    } finally {
        IOUtils.closeQuietly(input);
    }
    return cols;
}
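Judging from the fields read above, the file named by ColumnMetaData.KEY_COLS_METADATA is a JSON array whose elements carry a key (the column name), an index, and a numeric type code that getHiveType() resolves to a Hive type. A minimal sketch of such a file, with purely hypothetical column names and type codes not taken from the project:

[
  { "key": "id",        "index": 0, "type": -5 },
  { "key": "user_name", "index": 1, "type": 12 }
]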
use of com.qlangtech.tis.hive.HiveColumn in project plugins by qlangtech.
the class UnionHiveTask method getParsersString.
private String getParsersString() {
    StringBuilder sb = new StringBuilder();
    int parserSize = parserList.size();
    int parserCnt = 0;
    for (HiveInsertFromSelectParser parser : parserList) {
        Map<String, HiveColumn> columnMap = parser.getColsMap();
        sb.append("SELECT ");
        int columnSize = columnSet.size();
        int columnCnt = 0;
        for (String column : columnSet) {
            if (columnMap.containsKey(column)) {
                HiveColumn hiveColumn = columnMap.get(column);
                if (hiveColumn.hasAliasName()) {
                    sb.append(hiveColumn.getRawName()).append(" AS ").append(column);
                } else if (hiveColumn.hasDefaultValue()) {
                    sb.append(hiveColumn.getDefalutValue()).append(" AS ").append(column);
                } else {
                    sb.append(hiveColumn.getName());
                }
            } else {
                // the source table lacks this column: pad it with an empty-string literal
                sb.append("'' AS ").append(column);
            }
            if (++columnCnt < columnSize) {
                sb.append(", ");
            }
        }
        sb.append(" FROM `").append(parser.getSourceTableName()).append("`");
        if (parser.getWhere() != null) {
            sb.append(" where ").append(getConditionString(parser.getWhere().getChild(0)));
        }
        if (++parserCnt < parserSize) {
            sb.append("\nUNION ALL\n");
        }
    }
    return sb.toString();
}
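As a rough illustration of the string this builder produces, suppose columnSet is (id, name, memo) and parserList holds two parsers over tables order_a and order_b, where order_b lacks a memo column and carries a WHERE condition; all table, column, and condition names here are hypothetical, not taken from the project:

SELECT id, name, memo FROM `order_a`
UNION ALL
SELECT id, name, '' AS memo FROM `order_b` where is_valid = 1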
use of com.qlangtech.tis.hive.HiveColumn in project plugins by qlangtech.
the class BasicEngineJob method prepare.
public void prepare() {
    super.prepare();
    this.colsExcludePartitionCols = getCols();
    int[] appendStartIndex = new int[] { colsExcludePartitionCols.size() };
    List<HiveColumn> cols = Lists.newArrayList(colsExcludePartitionCols);
    IDumpTable.preservedPsCols.forEach((c) -> {
        // append each preserved partition column as a STRING column after the data columns
        HiveColumn hiveCol = new HiveColumn();
        hiveCol.setName(c);
        hiveCol.setType(SupportHiveDataType.STRING.name());
        hiveCol.setIndex(appendStartIndex[0]++);
        cols.add(hiveCol);
    });
    initializeHiveTable(cols);
}
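For illustration only, and assuming IDumpTable.preservedPsCols holds the partition columns pt and pmod (an assumption about the defaults, not shown in this snippet), a table whose DataX columns are id and name would hand initializeHiveTable the list below, with the partition columns typed STRING and indexed after the data columns:

id    (index 0, type from getCols())
name  (index 1, type from getCols())
pt    (index 2, STRING)
pmod  (index 3, STRING)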
use of com.qlangtech.tis.hive.HiveColumn in project plugins by qlangtech.
the class BasicEngineJob method getCols.
private List<HiveColumn> getCols() {
    // try {
    // (List<Configuration>) TisDataXHiveWriter.jobColumnsField.get(this);
    List<Configuration> cols = this.columns;
    AtomicInteger index = new AtomicInteger();
    // map each DataX column Configuration to a HiveColumn with a sequential index
    return cols.stream().map((c) -> {
        HiveColumn hivCol = new HiveColumn();
        SupportHiveDataType columnType = SupportHiveDataType.valueOf(StringUtils.upperCase(c.getString(HdfsColMeta.KEY_TYPE)));
        String name = StringUtils.remove(c.getString(HdfsColMeta.KEY_NAME), "`");
        if (StringUtils.isBlank(name)) {
            throw new IllegalStateException("col name can not be blank");
        }
        hivCol.setName(name);
        hivCol.setType(columnType.name());
        hivCol.setIndex(index.getAndIncrement());
        return hivCol;
    }).collect(Collectors.toList());
    // } catch (IllegalAccessException e) {
    // throw new RuntimeException(e);
    // }
}
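The Configuration entries consumed here are the writer's column descriptors. Assuming HdfsColMeta.KEY_NAME and HdfsColMeta.KEY_TYPE resolve to the usual name/type keys (an assumption, not confirmed by this snippet), a hypothetical entry such as the one below would yield a HiveColumn named order_id of type BIGINT at the next index, since the mapping strips backticks and upper-cases the type before SupportHiveDataType.valueOf:

{ "name": "`order_id`", "type": "bigint" }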