Use of org.apache.hudi.exception.HoodieException in project hudi by apache.
The class HoodieDefaultTimeline, method setInstants:
public void setInstants(List<HoodieInstant> instants) {
  this.instants = instants;
  final MessageDigest md;
  try {
    md = MessageDigest.getInstance(HASHING_ALGORITHM);
    this.instants.forEach(i -> md.update(StringUtils.joinUsingDelim("_",
        i.getTimestamp(), i.getAction(), i.getState().name()).getBytes()));
  } catch (NoSuchAlgorithmException nse) {
    throw new HoodieException(nse);
  }
  this.timelineHash = StringUtils.toHexString(md.digest());
}
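The method folds every instant's timestamp, action, and state into a single digest, so any change to the timeline yields a different hash. Below is a minimal standalone sketch of the same pattern, assuming SHA-256 in place of Hudi's HASHING_ALGORITHM constant and plain strings in place of HoodieInstant; the class and method names are illustrative, not Hudi's:

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.List;

public class TimelineHashSketch {

  // Hash a list of entries into one hex digest; changing any entry changes the hash.
  static String hashEntries(List<String> entries) {
    final MessageDigest md;
    try {
      md = MessageDigest.getInstance("SHA-256");
    } catch (NoSuchAlgorithmException nse) {
      // Mirror the Hudi code: wrap the checked exception in an unchecked one.
      throw new RuntimeException(nse);
    }
    entries.forEach(e -> md.update(e.getBytes(StandardCharsets.UTF_8)));
    StringBuilder hex = new StringBuilder();
    for (byte b : md.digest()) {
      hex.append(String.format("%02x", b));
    }
    return hex.toString();
  }

  public static void main(String[] args) {
    System.out.println(hashEntries(List.of("001_commit_COMPLETED", "002_commit_INFLIGHT")));
  }
}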
Use of org.apache.hudi.exception.HoodieException in project hudi by apache.
The class MarkerUtils, method writeMarkerTypeToFile:
/**
 * Writes the marker type to the file `MARKERS.type`.
 *
 * @param markerType marker type.
 * @param fileSystem file system to use.
 * @param markerDir  marker directory.
 */
public static void writeMarkerTypeToFile(MarkerType markerType, FileSystem fileSystem, String markerDir) {
  Path markerTypeFilePath = new Path(markerDir, MARKER_TYPE_FILENAME);
  FSDataOutputStream fsDataOutputStream = null;
  BufferedWriter bufferedWriter = null;
  try {
    fsDataOutputStream = fileSystem.create(markerTypeFilePath, false);
    bufferedWriter = new BufferedWriter(new OutputStreamWriter(fsDataOutputStream, StandardCharsets.UTF_8));
    bufferedWriter.write(markerType.toString());
  } catch (IOException e) {
    throw new HoodieException("Failed to create marker type file " + markerTypeFilePath.toString()
        + "; " + e.getMessage(), e);
  } finally {
    closeQuietly(bufferedWriter);
    closeQuietly(fsDataOutputStream);
  }
}
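Note that fileSystem.create(markerTypeFilePath, false) fails if the file already exists, so the method never silently overwrites a marker type written by another writer. A local-filesystem analogue of the same idea, a sketch using java.nio instead of Hadoop's FileSystem (the UncheckedIOException and method names are illustrative):

import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;

public class MarkerTypeFileSketch {

  // Write the marker type to MARKERS.type, failing if the file already exists
  // (CREATE_NEW is the java.nio analogue of fileSystem.create(path, false)).
  static void writeMarkerType(String markerType, Path markerDir) {
    Path markerTypeFile = markerDir.resolve("MARKERS.type");
    try {
      Files.writeString(markerTypeFile, markerType,
          StandardCharsets.UTF_8, StandardOpenOption.CREATE_NEW);
    } catch (IOException e) {
      throw new UncheckedIOException("Failed to create marker type file " + markerTypeFile, e);
    }
  }

  public static void main(String[] args) throws IOException {
    Path dir = Files.createTempDirectory("markers");
    writeMarkerType("TIMELINE_SERVER_BASED", dir);
    System.out.println(Files.readString(dir.resolve("MARKERS.type")));
  }
}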
Use of org.apache.hudi.exception.HoodieException in project hudi by apache.
The class OrcUtils, method getHoodieKeyIterator:
/**
 * Provides a closable iterator for reading the given ORC file.
 *
 * @param configuration configuration to build fs object
 * @param filePath      The ORC file path
 * @return {@link ClosableIterator} of {@link HoodieKey}s for reading the ORC file
 */
@Override
public ClosableIterator<HoodieKey> getHoodieKeyIterator(Configuration configuration, Path filePath) {
  try {
    Configuration conf = new Configuration(configuration);
    conf.addResource(FSUtils.getFs(filePath.toString(), conf).getConf());
    Reader reader = OrcFile.createReader(filePath, OrcFile.readerOptions(conf));
    Schema readSchema = HoodieAvroUtils.getRecordKeyPartitionPathSchema();
    TypeDescription orcSchema = AvroOrcUtils.createOrcSchema(readSchema);
    RecordReader recordReader = reader.rows(new Options(conf).schema(orcSchema));
    List<String> fieldNames = orcSchema.getFieldNames();
    // column indices for the RECORD_KEY_METADATA_FIELD, PARTITION_PATH_METADATA_FIELD fields
    int keyCol = -1;
    int partitionCol = -1;
    for (int i = 0; i < fieldNames.size(); i++) {
      if (fieldNames.get(i).equals(HoodieRecord.RECORD_KEY_METADATA_FIELD)) {
        keyCol = i;
      }
      if (fieldNames.get(i).equals(HoodieRecord.PARTITION_PATH_METADATA_FIELD)) {
        partitionCol = i;
      }
    }
    if (keyCol == -1 || partitionCol == -1) {
      throw new HoodieException(String.format("Couldn't find row keys or partition path in %s.", filePath));
    }
    return new OrcReaderIterator<>(recordReader, readSchema, orcSchema);
  } catch (IOException e) {
    throw new HoodieIOException("Failed to open reader from ORC file:" + filePath, e);
  }
}
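The fail-fast lookup of the two metadata columns is the part worth isolating: the reader refuses to proceed unless both the record-key and partition-path columns exist in the ORC schema. A self-contained sketch of that check, assuming Hudi's standard metadata column names "_hoodie_record_key" and "_hoodie_partition_path"; the class and exception choice are illustrative:

import java.util.List;

public class MetadataColumnLookupSketch {

  // Resolve the positions of the record-key and partition-path columns,
  // throwing if either is missing from the schema.
  static int[] findKeyAndPartitionColumns(List<String> fieldNames) {
    int keyCol = fieldNames.indexOf("_hoodie_record_key");
    int partitionCol = fieldNames.indexOf("_hoodie_partition_path");
    if (keyCol == -1 || partitionCol == -1) {
      throw new IllegalStateException("Couldn't find row keys or partition path in " + fieldNames);
    }
    return new int[] {keyCol, partitionCol};
  }

  public static void main(String[] args) {
    int[] cols = findKeyAndPartitionColumns(
        List.of("_hoodie_commit_time", "_hoodie_record_key", "_hoodie_partition_path"));
    System.out.println("keyCol=" + cols[0] + ", partitionCol=" + cols[1]);
  }
}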
Use of org.apache.hudi.exception.HoodieException in project hudi by apache.
The class HoodieTableSource, method getStreamInputFormat:
private InputFormat<RowData, ?> getStreamInputFormat() {
  // if table does not exist, use schema from the DDL
  Schema tableAvroSchema = this.metaClient == null ? inferSchemaFromDdl() : getTableAvroSchema();
  final DataType rowDataType = AvroSchemaConverter.convertToDataType(tableAvroSchema);
  final RowType rowType = (RowType) rowDataType.getLogicalType();
  final RowType requiredRowType = (RowType) getProducedDataType().notNull().getLogicalType();
  final String queryType = this.conf.getString(FlinkOptions.QUERY_TYPE);
  if (FlinkOptions.QUERY_TYPE_SNAPSHOT.equals(queryType)) {
    final HoodieTableType tableType = HoodieTableType.valueOf(this.conf.getString(FlinkOptions.TABLE_TYPE));
    boolean emitDelete = tableType == HoodieTableType.MERGE_ON_READ;
    return mergeOnReadInputFormat(rowType, requiredRowType, tableAvroSchema, rowDataType,
        Collections.emptyList(), emitDelete);
  }
  String errMsg = String.format("Invalid query type : '%s', options ['%s'] are supported now",
      queryType, FlinkOptions.QUERY_TYPE_SNAPSHOT);
  throw new HoodieException(errMsg);
}
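The guard at the end rejects any query type other than snapshot and reports the supported options in the error message. A trimmed-down sketch of that validation pattern, assuming a plain string option rather than Flink's configuration classes; all names here are illustrative:

import java.util.Set;

public class QueryTypeCheckSketch {

  private static final Set<String> SUPPORTED_QUERY_TYPES = Set.of("snapshot");

  // Validate a user-supplied query type, failing with a message that lists the supported values.
  static String validateQueryType(String queryType) {
    if (!SUPPORTED_QUERY_TYPES.contains(queryType)) {
      throw new IllegalArgumentException(String.format(
          "Invalid query type : '%s', options %s are supported now", queryType, SUPPORTED_QUERY_TYPES));
    }
    return queryType;
  }

  public static void main(String[] args) {
    System.out.println(validateQueryType("snapshot"));
    try {
      validateQueryType("incremental");
    } catch (IllegalArgumentException e) {
      System.out.println(e.getMessage());
    }
  }
}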
Use of org.apache.hudi.exception.HoodieException in project hudi by apache.
The class TimeWait, method waitFor:
/**
 * Wait for an interval time.
 */
public void waitFor() {
  try {
    if (waitingTime > timeout) {
      throw new HoodieException("Timeout(" + waitingTime + "ms) while waiting for " + action);
    }
    TimeUnit.MILLISECONDS.sleep(interval);
    waitingTime += interval;
  } catch (InterruptedException e) {
    throw new HoodieException("Error while waiting for " + action, e);
  }
}
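TimeWait accumulates the slept interval across calls and throws once the total exceeds the timeout, so a caller polls by invoking waitFor() in a loop until the awaited condition holds or the timeout fires. A minimal standalone version of that loop, with illustrative class and field names (this sketch also restores the interrupt flag, which the quoted snippet does not):

import java.util.concurrent.TimeUnit;

public class TimeWaitSketch {

  private final long timeoutMs;
  private final long intervalMs;
  private final String action;
  private long waitingTimeMs = 0;

  TimeWaitSketch(long timeoutMs, long intervalMs, String action) {
    this.timeoutMs = timeoutMs;
    this.intervalMs = intervalMs;
    this.action = action;
  }

  // Sleep one interval, tracking total waited time; throw once the timeout is exceeded.
  void waitFor() {
    try {
      if (waitingTimeMs > timeoutMs) {
        throw new RuntimeException("Timeout(" + waitingTimeMs + "ms) while waiting for " + action);
      }
      TimeUnit.MILLISECONDS.sleep(intervalMs);
      waitingTimeMs += intervalMs;
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
      throw new RuntimeException("Error while waiting for " + action, e);
    }
  }

  public static void main(String[] args) {
    TimeWaitSketch wait = new TimeWaitSketch(50, 20, "instant to complete");
    try {
      for (int i = 0; i < 5; i++) {
        wait.waitFor();
      }
    } catch (RuntimeException e) {
      System.out.println(e.getMessage()); // Timeout(60ms) while waiting for instant to complete
    }
  }
}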