
Example 91 with HoodieException

Use of org.apache.hudi.exception.HoodieException in project hudi by apache.

From the class FlinkOptions, method allOptions.

/**
 * Returns all the config options.
 */
public static List<ConfigOption<?>> allOptions() {
    Field[] declaredFields = FlinkOptions.class.getDeclaredFields();
    List<ConfigOption<?>> options = new ArrayList<>();
    for (Field field : declaredFields) {
        if (java.lang.reflect.Modifier.isStatic(field.getModifiers()) && field.getType().equals(ConfigOption.class)) {
            try {
                // For a static field, the instance argument to Field#get is ignored.
                options.add((ConfigOption<?>) field.get(ConfigOption.class));
            } catch (IllegalAccessException e) {
                throw new HoodieException("Error while fetching static config option", e);
            }
        }
    }
    return options;
}
Also used: ConfigOption(org.apache.flink.configuration.ConfigOption) Field(java.lang.reflect.Field) ArrayList(java.util.ArrayList) HoodieException(org.apache.hudi.exception.HoodieException)
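
A minimal usage sketch (the printing logic is illustrative, not from the Hudi sources): allOptions() can back a doc generator or a validation pass, and Flink's ConfigOption exposes key() and hasDefaultValue() for that purpose.

import java.util.List;
import org.apache.flink.configuration.ConfigOption;
import org.apache.hudi.configuration.FlinkOptions;

public class PrintFlinkOptions {
    public static void main(String[] args) {
        // Reflectively gathers every static ConfigOption declared on FlinkOptions.
        List<ConfigOption<?>> options = FlinkOptions.allOptions();
        for (ConfigOption<?> option : options) {
            // key() and hasDefaultValue() are part of Flink's ConfigOption API.
            System.out.println(option.key() + " (has default: " + option.hasDefaultValue() + ")");
        }
    }
}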

Example 92 with HoodieException

Use of org.apache.hudi.exception.HoodieException in project hudi by apache.

From the class ClusteringTestUtils, method createClusteringPlan.

public static HoodieClusteringPlan createClusteringPlan(HoodieTableMetaClient metaClient, String instantTime, String fileId) {
    try {
        String basePath = metaClient.getBasePath();
        String partition = DEFAULT_PARTITION_PATHS[0];
        // Create a dummy base file on disk so the file slice below has a real path to reference.
        createBaseFile(basePath, partition, instantTime, fileId, 1);
        FileSlice slice = new FileSlice(partition, instantTime, fileId);
        slice.setBaseFile(new CompactionTestUtils.DummyHoodieBaseFile(Paths.get(basePath, partition, baseFileName(instantTime, fileId)).toString()));
        // A single clustering group containing the single slice created above.
        List<FileSlice>[] fileSliceGroups = new List[] { Collections.singletonList(slice) };
        return ClusteringUtils.createClusteringPlan("strategy", new HashMap<>(), fileSliceGroups, Collections.emptyMap());
    } catch (Exception e) {
        throw new HoodieException(e.getMessage(), e);
    }
}
Also used: FileSlice(org.apache.hudi.common.model.FileSlice) List(java.util.List) HoodieException(org.apache.hudi.exception.HoodieException) HoodieClusteringPlan(org.apache.hudi.avro.model.HoodieClusteringPlan)
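
A hedged test fragment showing how the returned plan might be asserted on, assuming the Avro-generated HoodieClusteringPlan exposes getInputGroups() and getStrategy().getStrategyClassName() accessors (metaClient setup and the JUnit assertEquals import are elided):

static void assertSinglePlanGroup(HoodieTableMetaClient metaClient) {
    HoodieClusteringPlan plan = ClusteringTestUtils.createClusteringPlan(metaClient, "001", "file-1");
    // One file slice group went in, so one input group is expected.
    assertEquals(1, plan.getInputGroups().size());
    // "strategy" was passed as the strategy class name above.
    assertEquals("strategy", plan.getStrategy().getStrategyClassName());
}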

Example 93 with HoodieException

Use of org.apache.hudi.exception.HoodieException in project hudi by apache.

From the class HoodieBackedTableMetadataWriter, method initializeFileGroups.

/**
 * Initialize file groups for a partition. For file listing, we just have one file group.
 *
 * All file groups for a given metadata partition have a fixed prefix as per {@link MetadataPartitionType#getFileIdPrefix()}.
 * Each file group is suffixed with 4 digits, incremented by 1 starting from 0000.
 *
 * Let's say we configure 10 file groups for the record level index partition with the prefix "record-index-bucket-".
 * The file groups will be named:
 *    record-index-bucket-0000, ..., record-index-bucket-0009
 */
private void initializeFileGroups(HoodieTableMetaClient dataMetaClient, MetadataPartitionType metadataPartition, String instantTime, int fileGroupCount) throws IOException {
    final HashMap<HeaderMetadataType, String> blockHeader = new HashMap<>();
    blockHeader.put(HeaderMetadataType.INSTANT_TIME, instantTime);
    // Archival of the data table has a dependency on compaction (base files) in the metadata table.
    // It is assumed that, as of the time Tx of the base instant (/compaction time) in the metadata table,
    // all commits in the data table are in sync with the metadata table. So, we always start with a log file for any file group.
    final HoodieDeleteBlock block = new HoodieDeleteBlock(new HoodieKey[0], blockHeader);
    LOG.info(String.format("Creating %d file groups for partition %s with base fileId %s at instant time %s", fileGroupCount, metadataPartition.getPartitionPath(), metadataPartition.getFileIdPrefix(), instantTime));
    for (int i = 0; i < fileGroupCount; ++i) {
        final String fileGroupFileId = String.format("%s%04d", metadataPartition.getFileIdPrefix(), i);
        try {
            HoodieLogFormat.Writer writer = HoodieLogFormat.newWriterBuilder()
                .onParentPath(FSUtils.getPartitionPath(metadataWriteConfig.getBasePath(), metadataPartition.getPartitionPath()))
                .withFileId(fileGroupFileId)
                .overBaseCommit(instantTime)
                .withLogVersion(HoodieLogFile.LOGFILE_BASE_VERSION)
                .withFileSize(0L)
                .withSizeThreshold(metadataWriteConfig.getLogFileMaxSize())
                .withFs(dataMetaClient.getFs())
                .withRolloverLogWriteToken(HoodieLogFormat.DEFAULT_WRITE_TOKEN)
                .withLogWriteToken(HoodieLogFormat.DEFAULT_WRITE_TOKEN)
                .withFileExtension(HoodieLogFile.DELTA_EXTENSION)
                .build();
            writer.appendBlock(block);
            writer.close();
        } catch (InterruptedException e) {
            throw new HoodieException("Failed to created fileGroup " + fileGroupFileId + " for partition " + metadataPartition.getPartitionPath(), e);
        }
    }
}
Also used: HashMap(java.util.HashMap) HoodieDeleteBlock(org.apache.hudi.common.table.log.block.HoodieDeleteBlock) HoodieLogFormat(org.apache.hudi.common.table.log.HoodieLogFormat) HeaderMetadataType(org.apache.hudi.common.table.log.block.HoodieLogBlock.HeaderMetadataType) HoodieException(org.apache.hudi.exception.HoodieException)
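
The 4-digit suffix scheme described in the Javadoc is plain String.format zero padding; a self-contained sketch of the naming (the prefix is the illustrative one from the Javadoc):

public class FileGroupNaming {
    public static void main(String[] args) {
        String fileIdPrefix = "record-index-bucket-"; // illustrative prefix from the Javadoc
        int fileGroupCount = 10;
        for (int i = 0; i < fileGroupCount; i++) {
            // %04d zero-pads the index to four digits: 0000 through 0009.
            System.out.println(String.format("%s%04d", fileIdPrefix, i));
        }
    }
}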

Example 94 with HoodieException

Use of org.apache.hudi.exception.HoodieException in project hudi by apache.

From the class JmxReporterServer, method start.

public void start() {
    ValidationUtils.checkArgument(reporter != null && connector != null, "reporter or connector cannot be null!");
    try {
        connector.start();
        reporter.start();
    } catch (Exception e) {
        throw new HoodieException("connector or reporter start failed", e);
    }
}
Also used: HoodieException(org.apache.hudi.exception.HoodieException) NoSuchObjectException(java.rmi.NoSuchObjectException) IOException(java.io.IOException)
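
The validate-then-wrap idiom above is reusable; a generic sketch under stated assumptions (ServiceStarter is a made-up class for illustration; only ValidationUtils and HoodieException are real Hudi classes):

import java.util.concurrent.Callable;
import org.apache.hudi.common.util.ValidationUtils;
import org.apache.hudi.exception.HoodieException;

public class ServiceStarter {
    // Fail fast on a null action, then surface any checked failure as Hudi's unchecked HoodieException.
    public static void start(Callable<Void> startAction, String name) {
        ValidationUtils.checkArgument(startAction != null, name + " cannot be null!");
        try {
            startAction.call();
        } catch (Exception e) {
            throw new HoodieException(name + " start failed", e);
        }
    }
}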

Example 95 with HoodieException

Use of org.apache.hudi.exception.HoodieException in project hudi by apache.

From the class MetricsReporterFactory, method createReporter.

public static MetricsReporter createReporter(HoodieWriteConfig config, MetricRegistry registry) {
    String reporterClassName = config.getMetricReporterClassName();
    if (!StringUtils.isNullOrEmpty(reporterClassName)) {
        Object instance = ReflectionUtils.loadClass(reporterClassName, new Class<?>[] { Properties.class, MetricRegistry.class }, config.getProps(), registry);
        if (!(instance instanceof CustomizableMetricsReporter)) {
            throw new HoodieException(reporterClassName + " is not a subclass of CustomizableMetricsReporter");
        }
        return (MetricsReporter) instance;
    }
    MetricsReporterType type = config.getMetricsReporterType();
    MetricsReporter reporter = null;
    switch(type) {
        case GRAPHITE:
            reporter = new MetricsGraphiteReporter(config, registry);
            break;
        case INMEMORY:
            reporter = new InMemoryMetricsReporter();
            break;
        case JMX:
            reporter = new JmxMetricsReporter(config, registry);
            break;
        case DATADOG:
            reporter = new DatadogMetricsReporter(config, registry);
            break;
        case PROMETHEUS_PUSHGATEWAY:
            reporter = new PushGatewayMetricsReporter(config, registry);
            break;
        case PROMETHEUS:
            reporter = new PrometheusReporter(config, registry);
            break;
        case CONSOLE:
            reporter = new ConsoleMetricsReporter(registry);
            break;
        case CLOUDWATCH:
            reporter = new CloudWatchMetricsReporter(config, registry);
            break;
        default:
            LOG.error("Reporter type[" + type + "] is not supported.");
            break;
    }
    return reporter;
}
Also used: HoodieException(org.apache.hudi.exception.HoodieException) PrometheusReporter(org.apache.hudi.metrics.prometheus.PrometheusReporter) CloudWatchMetricsReporter(org.apache.hudi.metrics.cloudwatch.CloudWatchMetricsReporter) DatadogMetricsReporter(org.apache.hudi.metrics.datadog.DatadogMetricsReporter) CustomizableMetricsReporter(org.apache.hudi.metrics.custom.CustomizableMetricsReporter) PushGatewayMetricsReporter(org.apache.hudi.metrics.prometheus.PushGatewayMetricsReporter)
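
On the reflective branch: a hedged sketch of the configuration that reaches it. The property keys follow Hudi's hoodie.metrics.* naming but should be verified against your version, and the reporter class name is hypothetical; it must extend CustomizableMetricsReporter with a (Properties, MetricRegistry) constructor, or the instanceof check above throws HoodieException.

import java.util.Properties;

public class CustomReporterProps {
    public static Properties metricsProps() {
        Properties props = new Properties();
        // Assumed keys: hoodie.metrics.on enables metrics, and hoodie.metrics.reporter.class
        // backs getMetricReporterClassName(); com.example.MyReporter is hypothetical.
        props.setProperty("hoodie.metrics.on", "true");
        props.setProperty("hoodie.metrics.reporter.class", "com.example.MyReporter");
        return props;
    }
}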

Aggregations

HoodieException (org.apache.hudi.exception.HoodieException): 171 usages
IOException (java.io.IOException): 87 usages
Path (org.apache.hadoop.fs.Path): 45 usages
Schema (org.apache.avro.Schema): 35 usages
HoodieIOException (org.apache.hudi.exception.HoodieIOException): 35 usages
List (java.util.List): 30 usages
ArrayList (java.util.ArrayList): 27 usages
HoodieTableMetaClient (org.apache.hudi.common.table.HoodieTableMetaClient): 23 usages
Collectors (java.util.stream.Collectors): 21 usages
HoodieInstant (org.apache.hudi.common.table.timeline.HoodieInstant): 19 usages
Option (org.apache.hudi.common.util.Option): 19 usages
HoodieTimeline (org.apache.hudi.common.table.timeline.HoodieTimeline): 18 usages
Map (java.util.Map): 16 usages
HoodieRecord (org.apache.hudi.common.model.HoodieRecord): 16 usages
GenericRecord (org.apache.avro.generic.GenericRecord): 15 usages
Arrays (java.util.Arrays): 14 usages
HoodieLogFile (org.apache.hudi.common.model.HoodieLogFile): 14 usages
Logger (org.apache.log4j.Logger): 14 usages
FileStatus (org.apache.hadoop.fs.FileStatus): 13 usages
HoodieCommitMetadata (org.apache.hudi.common.model.HoodieCommitMetadata): 13 usages