Search in sources:

Example 1 with HiveTimePartitionInfo

use of org.apache.inlong.sort.protocol.sink.HiveSinkInfo.HiveTimePartitionInfo in project incubator-inlong by apache.

From the class HiveSinkITCase, method prepareSinkSchema.

/**
 * Builds the {@link HiveSinkInfo} used by this test: four fields (a
 * millisecond timestamp, an int, and two strings), a time partition on the
 * timestamp field plus a field partition on the int field, written as
 * tab-separated text files under {@code dfsSchema + hdfsDataDir}.
 */
private HiveSinkInfo prepareSinkSchema() {
    final FieldInfo f1 = new FieldInfo(fieldName1, new TimestampFormatInfo("MILLIS"));
    final FieldInfo f2 = new FieldInfo(fieldName2, IntFormatInfo.INSTANCE);
    final FieldInfo f3 = new FieldInfo(fieldName3, StringFormatInfo.INSTANCE);
    final FieldInfo f4 = new FieldInfo(fieldName4, StringFormatInfo.INSTANCE);
    final HiveTimePartitionInfo timePartition = new HiveTimePartitionInfo(f1.getName(), timePartitionFormat);
    final HiveFieldPartitionInfo fieldPartition = new HiveFieldPartitionInfo(f2.getName());
    // Use the char literal '\t' directly instead of the roundabout "\t".charAt(0).
    return new HiveSinkInfo(
            new FieldInfo[] { f1, f2, f3, f4 },
            hiveMetastoreUrl, hiveDb, hiveTable, hiveUsername, hivePassword,
            dfsSchema + hdfsDataDir,
            new HivePartitionInfo[] { timePartition, fieldPartition },
            new TextFileFormat('\t'));
}
Also used : HiveFieldPartitionInfo(org.apache.inlong.sort.protocol.sink.HiveSinkInfo.HiveFieldPartitionInfo) TimestampFormatInfo(org.apache.inlong.sort.formats.common.TimestampFormatInfo) HiveTimePartitionInfo(org.apache.inlong.sort.protocol.sink.HiveSinkInfo.HiveTimePartitionInfo) HiveSinkInfo(org.apache.inlong.sort.protocol.sink.HiveSinkInfo) TextFileFormat(org.apache.inlong.sort.protocol.sink.HiveSinkInfo.TextFileFormat) FieldInfo(org.apache.inlong.sort.protocol.FieldInfo)

Example 2 with HiveTimePartitionInfo

use of org.apache.inlong.sort.protocol.sink.HiveSinkInfo.HiveTimePartitionInfo in project incubator-inlong by apache.

From the class SinkInfoUtils, method createHiveSinkInfo.

/**
 * Create Hive sink info.
 *
 * @param hiveInfo   the Hive sink configuration returned by the manager
 * @param sinkFields the fields to be written to the Hive table
 * @return the assembled {@link HiveSinkInfo}
 * @throws RuntimeException if the JDBC url is missing or the field list is empty
 */
private static HiveSinkInfo createHiveSinkInfo(HiveSinkResponse hiveInfo, List<FieldInfo> sinkFields) {
    if (hiveInfo.getJdbcUrl() == null) {
        throw new RuntimeException(String.format("HiveSink={%s} server url cannot be empty", hiveInfo));
    }
    if (CollectionUtils.isEmpty(hiveInfo.getFieldList())) {
        throw new RuntimeException(String.format("HiveSink={%s} fields cannot be empty", hiveInfo));
    }
    // The separator is stored as the numeric code of the character (e.g. "9"
    // for '\t'). NOTE(review): a null or non-numeric value throws an
    // unhandled NumberFormatException here — presumably validated upstream;
    // confirm against the manager-side validation.
    Character separator = (char) Integer.parseInt(hiveInfo.getDataSeparator());
    HiveFileFormat fileFormat = parseFileFormat(hiveInfo.getFileFormat(), separator);
    List<HivePartitionInfo> partitionList = buildPartitions(hiveInfo);
    String dataPath = buildDataPath(hiveInfo.getHdfsDefaultFs(), hiveInfo.getWarehouseDir(),
            hiveInfo.getDbName(), hiveInfo.getTableName());
    return new HiveSinkInfo(sinkFields.toArray(new FieldInfo[0]), hiveInfo.getJdbcUrl(), hiveInfo.getDbName(),
            hiveInfo.getTableName(), hiveInfo.getUsername(), hiveInfo.getPassword(), dataPath,
            partitionList.toArray(new HiveSinkInfo.HivePartitionInfo[0]), fileFormat);
}

/**
 * Maps the configured file format name to a {@link HiveFileFormat};
 * defaults to TextFile when the name is unknown or null.
 */
private static HiveFileFormat parseFileFormat(String format, Character separator) {
    if (Constant.FILE_FORMAT_ORC.equalsIgnoreCase(format)) {
        return new HiveSinkInfo.OrcFileFormat(1000);
    } else if (Constant.FILE_FORMAT_SEQUENCE.equalsIgnoreCase(format)) {
        return new HiveSinkInfo.SequenceFileFormat(separator, 100);
    } else if (Constant.FILE_FORMAT_PARQUET.equalsIgnoreCase(format)) {
        return new HiveSinkInfo.ParquetFileFormat();
    } else {
        return new HiveSinkInfo.TextFileFormat(separator);
    }
}

/**
 * Builds the partition list: an optional primary time partition (Hive
 * partitions are by day, hour, or minute — in Sink the primary partition
 * must be a HiveTimePartitionInfo), followed by an optional secondary
 * field partition.
 */
private static List<HivePartitionInfo> buildPartitions(HiveSinkResponse hiveInfo) {
    List<HivePartitionInfo> partitionList = new ArrayList<>();
    String primary = hiveInfo.getPrimaryPartition();
    if (StringUtils.isNotEmpty(primary)) {
        String unit = hiveInfo.getPartitionUnit();
        partitionList.add(new HiveTimePartitionInfo(primary, PARTITION_TIME_FORMAT_MAP.get(unit)));
    }
    // TODO the type should be set according to the type of the field itself.
    if (StringUtils.isNotEmpty(hiveInfo.getSecondaryPartition())) {
        partitionList.add(new HiveSinkInfo.HiveFieldPartitionInfo(hiveInfo.getSecondaryPartition()));
    }
    return partitionList;
}

/**
 * Builds the data path: hdfsUrl + warehouseDir + "/" + dbName + ".db/" + tableName.
 * Fixes the original concatenation, which dropped the separator between
 * hdfsUrl and warehouseDir when warehouseDir had no leading slash.
 */
private static String buildDataPath(String hdfsUrl, String warehouseDir, String dbName, String tableName) {
    String base = hdfsUrl.endsWith("/") ? hdfsUrl.substring(0, hdfsUrl.length() - 1) : hdfsUrl;
    String dir = warehouseDir.endsWith("/") ? warehouseDir.substring(0, warehouseDir.length() - 1) : warehouseDir;
    if (!dir.startsWith("/")) {
        dir = "/" + dir;
    }
    return base + dir + "/" + dbName + ".db/" + tableName;
}
Also used : HiveTimePartitionInfo(org.apache.inlong.sort.protocol.sink.HiveSinkInfo.HiveTimePartitionInfo) ArrayList(java.util.ArrayList) HivePartitionInfo(org.apache.inlong.sort.protocol.sink.HiveSinkInfo.HivePartitionInfo) HiveSinkInfo(org.apache.inlong.sort.protocol.sink.HiveSinkInfo) HiveFileFormat(org.apache.inlong.sort.protocol.sink.HiveSinkInfo.HiveFileFormat) FieldInfo(org.apache.inlong.sort.protocol.FieldInfo)

Aggregations

FieldInfo (org.apache.inlong.sort.protocol.FieldInfo)2 HiveSinkInfo (org.apache.inlong.sort.protocol.sink.HiveSinkInfo)2 HiveTimePartitionInfo (org.apache.inlong.sort.protocol.sink.HiveSinkInfo.HiveTimePartitionInfo)2 ArrayList (java.util.ArrayList)1 TimestampFormatInfo (org.apache.inlong.sort.formats.common.TimestampFormatInfo)1 HiveFieldPartitionInfo (org.apache.inlong.sort.protocol.sink.HiveSinkInfo.HiveFieldPartitionInfo)1 HiveFileFormat (org.apache.inlong.sort.protocol.sink.HiveSinkInfo.HiveFileFormat)1 HivePartitionInfo (org.apache.inlong.sort.protocol.sink.HiveSinkInfo.HivePartitionInfo)1 TextFileFormat (org.apache.inlong.sort.protocol.sink.HiveSinkInfo.TextFileFormat)1