
Example 21 with HoodieException

Use of org.apache.hudi.exception.HoodieException in project hudi by apache.

From class HoodieDefaultTimeline, method setInstants.

public void setInstants(List<HoodieInstant> instants) {
    this.instants = instants;
    final MessageDigest md;
    try {
        md = MessageDigest.getInstance(HASHING_ALGORITHM);
        this.instants.forEach(i -> md.update(StringUtils.joinUsingDelim("_", i.getTimestamp(), i.getAction(), i.getState().name()).getBytes()));
    } catch (NoSuchAlgorithmException nse) {
        throw new HoodieException(nse);
    }
    this.timelineHash = StringUtils.toHexString(md.digest());
}
Also used: HoodieException (org.apache.hudi.exception.HoodieException), NoSuchAlgorithmException (java.security.NoSuchAlgorithmException), MessageDigest (java.security.MessageDigest)
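
The method above fingerprints the whole timeline by folding every instant into one digest and rethrows the checked NoSuchAlgorithmException as Hudi's unchecked HoodieException. Below is a minimal standalone sketch of the same hashing idiom; the class name, the SHA-256 choice, and the RuntimeException stand-in for HoodieException are illustrative assumptions, not Hudi code.

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.List;

public class TimelineHashSketch {

    // Folds each instant descriptor into a single digest and returns it as hex,
    // mirroring the update-then-digest flow of setInstants above.
    public static String hashInstants(List<String> instantDescriptors) {
        try {
            MessageDigest md = MessageDigest.getInstance("SHA-256"); // assumed algorithm
            for (String descriptor : instantDescriptors) {
                md.update(descriptor.getBytes(StandardCharsets.UTF_8));
            }
            StringBuilder hex = new StringBuilder();
            for (byte b : md.digest()) {
                hex.append(String.format("%02x", b));
            }
            return hex.toString();
        } catch (NoSuchAlgorithmException nse) {
            // Hudi wraps this in new HoodieException(nse) instead.
            throw new RuntimeException(nse);
        }
    }

    public static void main(String[] args) {
        System.out.println(hashInstants(List.of("20220101000000_commit_COMPLETED")));
    }
}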

Example 22 with HoodieException

Use of org.apache.hudi.exception.HoodieException in project hudi by apache.

From class MarkerUtils, method writeMarkerTypeToFile.

/**
 * Writes the marker type to the file `MARKERS.type`.
 *
 * @param markerType marker type.
 * @param fileSystem file system to use.
 * @param markerDir  marker directory.
 */
public static void writeMarkerTypeToFile(MarkerType markerType, FileSystem fileSystem, String markerDir) {
    Path markerTypeFilePath = new Path(markerDir, MARKER_TYPE_FILENAME);
    FSDataOutputStream fsDataOutputStream = null;
    BufferedWriter bufferedWriter = null;
    try {
        fsDataOutputStream = fileSystem.create(markerTypeFilePath, false);
        bufferedWriter = new BufferedWriter(new OutputStreamWriter(fsDataOutputStream, StandardCharsets.UTF_8));
        bufferedWriter.write(markerType.toString());
    } catch (IOException e) {
        throw new HoodieException("Failed to create marker type file " + markerTypeFilePath.toString() + "; " + e.getMessage(), e);
    } finally {
        closeQuietly(bufferedWriter);
        closeQuietly(fsDataOutputStream);
    }
}
Also used: Path (org.apache.hadoop.fs.Path), OutputStreamWriter (java.io.OutputStreamWriter), HoodieException (org.apache.hudi.exception.HoodieException), FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream), IOException (java.io.IOException), HoodieIOException (org.apache.hudi.exception.HoodieIOException), BufferedWriter (java.io.BufferedWriter)
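
A hedged sketch of calling this utility follows; the Configuration defaults to the local file system, the marker directory path is made up for illustration, and the import locations for MarkerUtils and MarkerType are assumptions about the Hudi package layout rather than something the example above shows.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hudi.common.table.marker.MarkerType;
import org.apache.hudi.common.util.MarkerUtils;

public class WriteMarkerTypeSketch {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        // Resolves to the local file system unless fs.defaultFS points elsewhere.
        FileSystem fs = FileSystem.get(conf);
        // Hypothetical marker directory for one instant; Hudi normally derives this path.
        String markerDir = "/tmp/hoodie_table/.hoodie/.temp/20220101000000";
        MarkerUtils.writeMarkerTypeToFile(MarkerType.TIMELINE_SERVER_BASED, fs, markerDir);
    }
}

Note that the original favors explicit closeQuietly calls in a finally block over try-with-resources, so a failure while closing the streams never masks the HoodieException raised for the write itself.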

Example 23 with HoodieException

Use of org.apache.hudi.exception.HoodieException in project hudi by apache.

From class OrcUtils, method getHoodieKeyIterator.

/**
 * Provides a closable iterator for reading the given ORC file.
 *
 * @param configuration configuration used to build the file system object
 * @param filePath      the ORC file path
 * @return {@link ClosableIterator} of {@link HoodieKey}s for reading the ORC file
 */
@Override
public ClosableIterator<HoodieKey> getHoodieKeyIterator(Configuration configuration, Path filePath) {
    try {
        Configuration conf = new Configuration(configuration);
        conf.addResource(FSUtils.getFs(filePath.toString(), conf).getConf());
        Reader reader = OrcFile.createReader(filePath, OrcFile.readerOptions(conf));
        Schema readSchema = HoodieAvroUtils.getRecordKeyPartitionPathSchema();
        TypeDescription orcSchema = AvroOrcUtils.createOrcSchema(readSchema);
        RecordReader recordReader = reader.rows(new Options(conf).schema(orcSchema));
        List<String> fieldNames = orcSchema.getFieldNames();
        // column indices for the RECORD_KEY_METADATA_FIELD, PARTITION_PATH_METADATA_FIELD fields
        int keyCol = -1;
        int partitionCol = -1;
        for (int i = 0; i < fieldNames.size(); i++) {
            if (fieldNames.get(i).equals(HoodieRecord.RECORD_KEY_METADATA_FIELD)) {
                keyCol = i;
            }
            if (fieldNames.get(i).equals(HoodieRecord.PARTITION_PATH_METADATA_FIELD)) {
                partitionCol = i;
            }
        }
        if (keyCol == -1 || partitionCol == -1) {
            throw new HoodieException(String.format("Couldn't find row keys or partition path in %s.", filePath));
        }
        return new OrcReaderIterator<>(recordReader, readSchema, orcSchema);
    } catch (IOException e) {
        throw new HoodieIOException("Failed to open reader from ORC file: " + filePath, e);
    }
}
Also used: Options (org.apache.orc.Reader.Options), Configuration (org.apache.hadoop.conf.Configuration), Schema (org.apache.avro.Schema), Reader (org.apache.orc.Reader), RecordReader (org.apache.orc.RecordReader), HoodieException (org.apache.hudi.exception.HoodieException), IOException (java.io.IOException), HoodieIOException (org.apache.hudi.exception.HoodieIOException), TypeDescription (org.apache.orc.TypeDescription)
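
Consuming the iterator is a matter of draining it inside try-with-resources, since Hudi's ClosableIterator is closeable and closing it releases the underlying ORC RecordReader. The sketch below assumes OrcUtils and ClosableIterator live under org.apache.hudi.common.util and uses an invented file path.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hudi.common.model.HoodieKey;
import org.apache.hudi.common.util.ClosableIterator;
import org.apache.hudi.common.util.OrcUtils;

public class OrcKeyScanSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Hypothetical base file inside a partitioned Hudi table.
        Path filePath = new Path("/tmp/hoodie_table/2022/01/01/a1b2c3_0-1-0_20220101000000.orc");
        // try-with-resources closes the iterator and the ORC reader behind it.
        try (ClosableIterator<HoodieKey> keys = new OrcUtils().getHoodieKeyIterator(conf, filePath)) {
            while (keys.hasNext()) {
                HoodieKey key = keys.next();
                System.out.println(key.getRecordKey() + " in " + key.getPartitionPath());
            }
        }
    }
}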

Example 24 with HoodieException

Use of org.apache.hudi.exception.HoodieException in project hudi by apache.

From class HoodieTableSource, method getStreamInputFormat.

private InputFormat<RowData, ?> getStreamInputFormat() {
    // if table does not exist, use schema from the DDL
    Schema tableAvroSchema = this.metaClient == null ? inferSchemaFromDdl() : getTableAvroSchema();
    final DataType rowDataType = AvroSchemaConverter.convertToDataType(tableAvroSchema);
    final RowType rowType = (RowType) rowDataType.getLogicalType();
    final RowType requiredRowType = (RowType) getProducedDataType().notNull().getLogicalType();
    final String queryType = this.conf.getString(FlinkOptions.QUERY_TYPE);
    if (FlinkOptions.QUERY_TYPE_SNAPSHOT.equals(queryType)) {
        final HoodieTableType tableType = HoodieTableType.valueOf(this.conf.getString(FlinkOptions.TABLE_TYPE));
        boolean emitDelete = tableType == HoodieTableType.MERGE_ON_READ;
        return mergeOnReadInputFormat(rowType, requiredRowType, tableAvroSchema, rowDataType, Collections.emptyList(), emitDelete);
    }
    String errMsg = String.format("Invalid query type : '%s', options ['%s'] are supported now", queryType, FlinkOptions.QUERY_TYPE_SNAPSHOT);
    throw new HoodieException(errMsg);
}
Also used: ResolvedSchema (org.apache.flink.table.catalog.ResolvedSchema), Schema (org.apache.avro.Schema), HoodieTableType (org.apache.hudi.common.model.HoodieTableType), DataType (org.apache.flink.table.types.DataType), RowType (org.apache.flink.table.types.logical.RowType), HoodieException (org.apache.hudi.exception.HoodieException)
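
For the snapshot branch to be taken, the Flink job configuration must carry FlinkOptions.QUERY_TYPE set to the snapshot value; any other query type lands on the HoodieException at the bottom of the method. A minimal sketch of such a configuration follows, using the option constants visible above plus FlinkOptions.TABLE_TYPE_MERGE_ON_READ as an assumed companion constant.

import org.apache.flink.configuration.Configuration;
import org.apache.hudi.configuration.FlinkOptions;

public class SnapshotQueryConfSketch {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Only the snapshot query type avoids the HoodieException in getStreamInputFormat.
        conf.setString(FlinkOptions.QUERY_TYPE, FlinkOptions.QUERY_TYPE_SNAPSHOT);
        // MERGE_ON_READ is the table type for which the method emits deletes.
        conf.setString(FlinkOptions.TABLE_TYPE, FlinkOptions.TABLE_TYPE_MERGE_ON_READ);
        System.out.println("query type = " + conf.getString(FlinkOptions.QUERY_TYPE));
    }
}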

Example 25 with HoodieException

Use of org.apache.hudi.exception.HoodieException in project hudi by apache.

From class TimeWait, method waitFor.

/**
 * Wait for an interval time.
 */
public void waitFor() {
    try {
        if (waitingTime > timeout) {
            throw new HoodieException("Timeout(" + waitingTime + "ms) while waiting for " + action);
        }
        TimeUnit.MILLISECONDS.sleep(interval);
        waitingTime += interval;
    } catch (InterruptedException e) {
        throw new HoodieException("Error while waiting for " + action, e);
    }
}
Also used: HoodieException (org.apache.hudi.exception.HoodieException)
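
TimeWait packages a bounded polling loop: every call sleeps for one interval, accumulates the time already waited, and raises a HoodieException once the budget is exhausted or the thread is interrupted. A standalone sketch of the same pattern follows; the field values are illustrative and RuntimeException stands in for HoodieException.

import java.util.concurrent.TimeUnit;

public class BoundedWaitSketch {

    private final long timeout = 5_000L;  // total wait budget in ms (illustrative)
    private final long interval = 100L;   // sleep per call in ms (illustrative)
    private final String action = "instant initialization"; // what we are waiting for
    private long waitingTime = 0L;

    // Mirrors TimeWait.waitFor above: check the budget, sleep one interval, account for it.
    public void waitFor() {
        try {
            if (waitingTime > timeout) {
                throw new RuntimeException("Timeout(" + waitingTime + "ms) while waiting for " + action);
            }
            TimeUnit.MILLISECONDS.sleep(interval);
            waitingTime += interval;
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // restore the interrupt flag before rethrowing
            throw new RuntimeException("Error while waiting for " + action, e);
        }
    }

    public static void main(String[] args) {
        BoundedWaitSketch wait = new BoundedWaitSketch();
        // Callers poll some condition and call waitFor() between checks.
        for (int i = 0; i < 3; i++) {
            wait.waitFor();
        }
        System.out.println("done waiting");
    }
}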

Aggregations (classes most often used alongside HoodieException, with occurrence counts)

HoodieException (org.apache.hudi.exception.HoodieException): 171
IOException (java.io.IOException): 87
Path (org.apache.hadoop.fs.Path): 45
Schema (org.apache.avro.Schema): 35
HoodieIOException (org.apache.hudi.exception.HoodieIOException): 35
List (java.util.List): 30
ArrayList (java.util.ArrayList): 27
HoodieTableMetaClient (org.apache.hudi.common.table.HoodieTableMetaClient): 23
Collectors (java.util.stream.Collectors): 21
HoodieInstant (org.apache.hudi.common.table.timeline.HoodieInstant): 19
Option (org.apache.hudi.common.util.Option): 19
HoodieTimeline (org.apache.hudi.common.table.timeline.HoodieTimeline): 18
Map (java.util.Map): 16
HoodieRecord (org.apache.hudi.common.model.HoodieRecord): 16
GenericRecord (org.apache.avro.generic.GenericRecord): 15
Arrays (java.util.Arrays): 14
HoodieLogFile (org.apache.hudi.common.model.HoodieLogFile): 14
Logger (org.apache.log4j.Logger): 14
FileStatus (org.apache.hadoop.fs.FileStatus): 13
HoodieCommitMetadata (org.apache.hudi.common.model.HoodieCommitMetadata): 13