
Example 1 with Closer

Use of com.google.common.io.Closer in project druid by druid-io.

The class RemoteTaskRunner, method stop():

@Override
@LifecycleStop
public void stop() {
    try {
        if (!started) {
            return;
        }
        started = false;
        resourceManagement.stopManagement();
        // Register every ZK worker plus the worker path cache, then close them
        // all in one call.
        Closer closer = Closer.create();
        for (ZkWorker zkWorker : zkWorkers.values()) {
            closer.register(zkWorker);
        }
        closer.register(workerPathCache);
        try {
            closer.close();
        } finally {
            // Shut down the status-watcher executor even if closing failed.
            workerStatusPathChildrenCacheExecutor.shutdown();
        }
        if (runPendingTasksExec != null) {
            runPendingTasksExec.shutdown();
        }
    } catch (Exception e) {
        throw Throwables.propagate(e);
    }
}
Also used: Closer (com.google.common.io.Closer), Throwables (com.google.common.base.Throwables), KeeperException (org.apache.zookeeper.KeeperException), MalformedURLException (java.net.MalformedURLException), IOException (java.io.IOException), ExecutionException (java.util.concurrent.ExecutionException), LifecycleStop (io.druid.java.util.common.lifecycle.LifecycleStop)
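
The shape above, register each resource and then close them all in one call, is the core Closer pattern. Here is a minimal, self-contained sketch of that register/close idiom; the method and file paths are hypothetical and stand in for any Closeable resources:

import com.google.common.io.Closer;

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;

public class CloserBasics {

    public static void printSum(String firstPath, String secondPath) throws IOException {
        Closer closer = Closer.create();
        try {
            // register() returns its argument, so resources can be opened inline.
            InputStream first = closer.register(new FileInputStream(firstPath));
            InputStream second = closer.register(new FileInputStream(secondPath));
            System.out.println(first.read() + second.read());
        } catch (Throwable t) {
            // Record t as the primary exception; anything thrown while closing
            // is suppressed rather than masking it.
            throw closer.rethrow(t);
        } finally {
            // Closes every registered resource, in reverse registration order.
            closer.close();
        }
    }
}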

Example 2 with Closer

Use of com.google.common.io.Closer in project druid by druid-io.

The class IndexMerger, method makeIndexFiles():

protected File makeIndexFiles(
    final List<IndexableAdapter> indexes,
    final AggregatorFactory[] metricAggs,
    final File outDir,
    final ProgressIndicator progress,
    final List<String> mergedDimensions,
    final List<String> mergedMetrics,
    final Function<ArrayList<Iterable<Rowboat>>, Iterable<Rowboat>> rowMergerFn,
    final IndexSpec indexSpec
) throws IOException {
    List<Metadata> metadataList = Lists.transform(indexes, new Function<IndexableAdapter, Metadata>() {

        @Nullable
        @Override
        public Metadata apply(IndexableAdapter input) {
            return input.getMetadata();
        }
    });
    Metadata segmentMetadata = null;
    if (metricAggs != null) {
        AggregatorFactory[] combiningMetricAggs = new AggregatorFactory[metricAggs.length];
        for (int i = 0; i < metricAggs.length; i++) {
            combiningMetricAggs[i] = metricAggs[i].getCombiningFactory();
        }
        segmentMetadata = Metadata.merge(metadataList, combiningMetricAggs);
    } else {
        segmentMetadata = Metadata.merge(metadataList, null);
    }
    final Map<String, ValueType> valueTypes = Maps.newTreeMap(Ordering.<String>natural().nullsFirst());
    final Map<String, String> metricTypeNames = Maps.newTreeMap(Ordering.<String>natural().nullsFirst());
    final Map<String, ColumnCapabilitiesImpl> columnCapabilities = Maps.newHashMap();
    final List<ColumnCapabilitiesImpl> dimCapabilities = new ArrayList<>();
    for (IndexableAdapter adapter : indexes) {
        for (String dimension : adapter.getDimensionNames()) {
            ColumnCapabilitiesImpl mergedCapabilities = columnCapabilities.get(dimension);
            ColumnCapabilities capabilities = adapter.getCapabilities(dimension);
            if (mergedCapabilities == null) {
                mergedCapabilities = new ColumnCapabilitiesImpl();
            }
            columnCapabilities.put(dimension, mergedCapabilities.merge(capabilities));
        }
        for (String metric : adapter.getMetricNames()) {
            ColumnCapabilitiesImpl mergedCapabilities = columnCapabilities.get(metric);
            ColumnCapabilities capabilities = adapter.getCapabilities(metric);
            if (mergedCapabilities == null) {
                mergedCapabilities = new ColumnCapabilitiesImpl();
            }
            columnCapabilities.put(metric, mergedCapabilities.merge(capabilities));
            valueTypes.put(metric, capabilities.getType());
            metricTypeNames.put(metric, adapter.getMetricType(metric));
        }
    }
    for (String dimension : mergedDimensions) {
        dimCapabilities.add(columnCapabilities.get(dimension));
    }
    Closer closer = Closer.create();
    try {
        final Interval dataInterval;
        final File v8OutDir = new File(outDir, "v8-tmp");
        FileUtils.forceMkdir(v8OutDir);
        registerDeleteDirectory(closer, v8OutDir);
        File tmpPeonFilesDir = new File(v8OutDir, "tmpPeonFiles");
        FileUtils.forceMkdir(tmpPeonFilesDir);
        registerDeleteDirectory(closer, tmpPeonFilesDir);
        final IOPeon ioPeon = new TmpFileIOPeon(tmpPeonFilesDir, true);
        closer.register(ioPeon);
        /*************  Main index.drd file **************/
        progress.progress();
        long startTime = System.currentTimeMillis();
        File indexFile = new File(v8OutDir, "index.drd");
        try (FileOutputStream fileOutputStream = new FileOutputStream(indexFile);
            FileChannel channel = fileOutputStream.getChannel()) {
            channel.write(ByteBuffer.wrap(new byte[] { IndexIO.V8_VERSION }));
            GenericIndexed.fromIterable(mergedDimensions, GenericIndexed.STRING_STRATEGY).writeToChannel(channel);
            GenericIndexed.fromIterable(mergedMetrics, GenericIndexed.STRING_STRATEGY).writeToChannel(channel);
            DateTime minTime = new DateTime(JodaUtils.MAX_INSTANT);
            DateTime maxTime = new DateTime(JodaUtils.MIN_INSTANT);
            for (IndexableAdapter index : indexes) {
                minTime = JodaUtils.minDateTime(minTime, index.getDataInterval().getStart());
                maxTime = JodaUtils.maxDateTime(maxTime, index.getDataInterval().getEnd());
            }
            dataInterval = new Interval(minTime, maxTime);
            serializerUtils.writeString(channel, String.format("%s/%s", minTime, maxTime));
            serializerUtils.writeString(channel, mapper.writeValueAsString(indexSpec.getBitmapSerdeFactory()));
        }
        IndexIO.checkFileSize(indexFile);
        log.info("outDir[%s] completed index.drd in %,d millis.", v8OutDir, System.currentTimeMillis() - startTime);
        /************* Setup Dim Conversions **************/
        progress.progress();
        startTime = System.currentTimeMillis();
        final ArrayList<FileOutputSupplier> dimOuts = Lists.newArrayListWithCapacity(mergedDimensions.size());
        final DimensionHandler[] handlers = makeDimensionHandlers(mergedDimensions, dimCapabilities);
        final List<DimensionMerger> mergers = new ArrayList<>();
        for (int i = 0; i < mergedDimensions.size(); i++) {
            DimensionMergerLegacy merger = handlers[i].makeLegacyMerger(indexSpec, v8OutDir, ioPeon, dimCapabilities.get(i), progress);
            mergers.add(merger);
            merger.writeMergedValueMetadata(indexes);
            FileOutputSupplier dimOut = new FileOutputSupplier(merger.makeDimFile(), true);
            merger.writeValueMetadataToFile(dimOut);
            dimOuts.add(dimOut);
        }
        log.info("outDir[%s] completed dim conversions in %,d millis.", v8OutDir, System.currentTimeMillis() - startTime);
        /************* Walk through data sets and merge them *************/
        progress.progress();
        startTime = System.currentTimeMillis();
        Iterable<Rowboat> theRows = makeRowIterable(indexes, mergedDimensions, mergedMetrics, rowMergerFn, dimCapabilities, handlers, mergers);
        LongSupplierSerializer timeWriter = CompressionFactory.getLongSerializer(ioPeon, "little_end_time", IndexIO.BYTE_ORDER, indexSpec.getLongEncoding(), CompressedObjectStrategy.DEFAULT_COMPRESSION_STRATEGY);
        timeWriter.open();
        ArrayList<MetricColumnSerializer> metWriters = Lists.newArrayListWithCapacity(mergedMetrics.size());
        final CompressedObjectStrategy.CompressionStrategy metCompression = indexSpec.getMetricCompression();
        final CompressionFactory.LongEncodingStrategy longEncoding = indexSpec.getLongEncoding();
        for (String metric : mergedMetrics) {
            ValueType type = valueTypes.get(metric);
            switch(type) {
                case LONG:
                    metWriters.add(new LongMetricColumnSerializer(metric, v8OutDir, ioPeon, metCompression, longEncoding));
                    break;
                case FLOAT:
                    metWriters.add(new FloatMetricColumnSerializer(metric, v8OutDir, ioPeon, metCompression));
                    break;
                case COMPLEX:
                    final String typeName = metricTypeNames.get(metric);
                    ComplexMetricSerde serde = ComplexMetrics.getSerdeForType(typeName);
                    if (serde == null) {
                        throw new ISE("Unknown type[%s]", typeName);
                    }
                    metWriters.add(new ComplexMetricColumnSerializer(metric, v8OutDir, ioPeon, serde));
                    break;
                default:
                    throw new ISE("Unknown type[%s]", type);
            }
        }
        for (MetricColumnSerializer metWriter : metWriters) {
            metWriter.open();
        }
        int rowCount = 0;
        long time = System.currentTimeMillis();
        // One row-number conversion buffer per source index: maps each old row
        // number to its position in the merged output, INVALID_ROW where absent.
        List<IntBuffer> rowNumConversions = Lists.newArrayListWithCapacity(indexes.size());
        for (IndexableAdapter index : indexes) {
            int[] arr = new int[index.getNumRows()];
            Arrays.fill(arr, INVALID_ROW);
            rowNumConversions.add(IntBuffer.wrap(arr));
        }
        for (Rowboat theRow : theRows) {
            progress.progress();
            timeWriter.add(theRow.getTimestamp());
            final Object[] metrics = theRow.getMetrics();
            for (int i = 0; i < metrics.length; ++i) {
                metWriters.get(i).serialize(metrics[i]);
            }
            Object[] dims = theRow.getDims();
            for (int i = 0; i < dims.length; ++i) {
                mergers.get(i).processMergedRow(dims[i]);
            }
            for (Map.Entry<Integer, TreeSet<Integer>> comprisedRow : theRow.getComprisedRows().entrySet()) {
                final IntBuffer conversionBuffer = rowNumConversions.get(comprisedRow.getKey());
                for (Integer rowNum : comprisedRow.getValue()) {
                    while (conversionBuffer.position() < rowNum) {
                        conversionBuffer.put(INVALID_ROW);
                    }
                    conversionBuffer.put(rowCount);
                }
            }
            if ((++rowCount % 500000) == 0) {
                log.info("outDir[%s] walked 500,000/%,d rows in %,d millis.", v8OutDir, rowCount, System.currentTimeMillis() - time);
                time = System.currentTimeMillis();
            }
        }
        for (IntBuffer rowNumConversion : rowNumConversions) {
            rowNumConversion.rewind();
        }
        final File timeFile = IndexIO.makeTimeFile(v8OutDir, IndexIO.BYTE_ORDER);
        timeFile.delete();
        ByteSink out = Files.asByteSink(timeFile, FileWriteMode.APPEND);
        timeWriter.closeAndConsolidate(out);
        IndexIO.checkFileSize(timeFile);
        for (MetricColumnSerializer metWriter : metWriters) {
            metWriter.close();
        }
        log.info("outDir[%s] completed walk through of %,d rows in %,d millis.", v8OutDir, rowCount, System.currentTimeMillis() - startTime);
        /************ Create Inverted Indexes and Finalize Columns *************/
        startTime = System.currentTimeMillis();
        final File invertedFile = new File(v8OutDir, "inverted.drd");
        Files.touch(invertedFile);
        out = Files.asByteSink(invertedFile, FileWriteMode.APPEND);
        final File geoFile = new File(v8OutDir, "spatial.drd");
        Files.touch(geoFile);
        OutputSupplier<FileOutputStream> spatialOut = Files.newOutputStreamSupplier(geoFile, true);
        for (int i = 0; i < mergedDimensions.size(); i++) {
            DimensionMergerLegacy legacyMerger = (DimensionMergerLegacy) mergers.get(i);
            legacyMerger.writeIndexes(rowNumConversions, closer);
            legacyMerger.writeIndexesToFiles(out, spatialOut);
            legacyMerger.writeRowValuesToFile(dimOuts.get(i));
        }
        log.info("outDir[%s] completed inverted.drd and wrote dimensions in %,d millis.", v8OutDir, System.currentTimeMillis() - startTime);
        final Function<String, String> dimFilenameFunction = new Function<String, String>() {

            @Override
            public String apply(@Nullable String input) {
                String formatString;
                if (columnCapabilities.get(input).isDictionaryEncoded()) {
                    formatString = "dim_%s.drd";
                } else {
                    formatString = String.format("numeric_dim_%%s_%s.drd", IndexIO.BYTE_ORDER);
                }
                return GuavaUtils.formatFunction(formatString).apply(input);
            }
        };
        final ArrayList<String> expectedFiles = Lists.newArrayList(Iterables.concat(Arrays.asList("index.drd", "inverted.drd", "spatial.drd", String.format("time_%s.drd", IndexIO.BYTE_ORDER)), Iterables.transform(mergedDimensions, dimFilenameFunction), Iterables.transform(mergedMetrics, GuavaUtils.formatFunction(String.format("met_%%s_%s.drd", IndexIO.BYTE_ORDER)))));
        if (segmentMetadata != null) {
            writeMetadataToFile(new File(v8OutDir, "metadata.drd"), segmentMetadata);
            log.info("wrote metadata.drd in outDir[%s].", v8OutDir);
            expectedFiles.add("metadata.drd");
        }
        Map<String, File> files = Maps.newLinkedHashMap();
        for (String fileName : expectedFiles) {
            files.put(fileName, new File(v8OutDir, fileName));
        }
        File smooshDir = new File(v8OutDir, "smoosher");
        FileUtils.forceMkdir(smooshDir);
        for (Map.Entry<String, File> entry : Smoosh.smoosh(v8OutDir, smooshDir, files).entrySet()) {
            entry.getValue().delete();
        }
        for (File file : smooshDir.listFiles()) {
            Files.move(file, new File(v8OutDir, file.getName()));
        }
        if (!smooshDir.delete()) {
            log.info("Unable to delete temporary dir[%s], contains[%s]", smooshDir, Arrays.asList(smooshDir.listFiles()));
            throw new IOException(String.format("Unable to delete temporary dir[%s]", smooshDir));
        }
        createIndexDrdFile(IndexIO.V8_VERSION, v8OutDir, GenericIndexed.fromIterable(mergedDimensions, GenericIndexed.STRING_STRATEGY), GenericIndexed.fromIterable(mergedMetrics, GenericIndexed.STRING_STRATEGY), dataInterval, indexSpec.getBitmapSerdeFactory());
        indexIO.getDefaultIndexIOHandler().convertV8toV9(v8OutDir, outDir, indexSpec);
        return outDir;
    } catch (Throwable t) {
        // rethrow() records t as the primary exception; exceptions thrown while
        // closing registered resources are suppressed instead of masking it.
        throw closer.rethrow(t);
    } finally {
        closer.close();
    }
}
Also used: ArrayList (java.util.ArrayList), CompressedObjectStrategy (io.druid.segment.data.CompressedObjectStrategy), DateTime (org.joda.time.DateTime), ComplexMetricColumnSerializer (io.druid.segment.serde.ComplexMetricColumnSerializer), TmpFileIOPeon (io.druid.segment.data.TmpFileIOPeon), TreeSet (java.util.TreeSet), FileOutputStream (java.io.FileOutputStream), IntBuffer (java.nio.IntBuffer), File (java.io.File), Map (java.util.Map), Nullable (javax.annotation.Nullable), ColumnCapabilitiesImpl (io.druid.segment.column.ColumnCapabilitiesImpl), Interval (org.joda.time.Interval), ComplexMetricSerde (io.druid.segment.serde.ComplexMetricSerde), IOPeon (io.druid.segment.data.IOPeon), ColumnCapabilities (io.druid.segment.column.ColumnCapabilities), Function (com.google.common.base.Function), ByteSink (com.google.common.io.ByteSink), ISE (io.druid.java.util.common.ISE), Closer (com.google.common.io.Closer), ValueType (io.druid.segment.column.ValueType), FileChannel (java.nio.channels.FileChannel), IOException (java.io.IOException), AggregatorFactory (io.druid.query.aggregation.AggregatorFactory), CompressionFactory (io.druid.segment.data.CompressionFactory), FileOutputSupplier (io.druid.common.guava.FileOutputSupplier), LongSupplierSerializer (io.druid.segment.data.LongSupplierSerializer)
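
Beyond streams, Example 2 also registers cleanup actions with the Closer: registerDeleteDirectory arranges for temporary directories such as v8-tmp to be deleted when the Closer closes, on success or failure. Druid's helper is not shown on this page, so the following is a hedged reconstruction of the trick under the assumption that it simply registers a Closeable whose close() removes the directory:

import com.google.common.io.Closer;

import java.io.Closeable;
import java.io.File;
import java.io.IOException;

import org.apache.commons.io.FileUtils;

public class DeleteOnClose {

    // Illustrative stand-in for Druid's registerDeleteDirectory: when the
    // closer closes, the directory tree is removed.
    static void registerDeleteDirectory(Closer closer, final File dir) {
        closer.register(new Closeable() {
            @Override
            public void close() throws IOException {
                FileUtils.deleteDirectory(dir);
            }
        });
    }

    public static void main(String[] args) throws IOException {
        Closer closer = Closer.create();
        try {
            File tmpDir = new File("v8-tmp-example"); // hypothetical scratch directory
            FileUtils.forceMkdir(tmpDir);
            registerDeleteDirectory(closer, tmpDir);
            // ... write intermediate files into tmpDir ...
        } catch (Throwable t) {
            throw closer.rethrow(t);
        } finally {
            closer.close(); // the scratch directory is deleted here, even on failure
        }
    }
}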

Example 3 with Closer

Use of com.google.common.io.Closer in project druid by druid-io.

The class QueryableIndexIndexableAdapter, method getRows():

@Override
public Iterable<Rowboat> getRows() {
    return new Iterable<Rowboat>() {

        @Override
        public Iterator<Rowboat> iterator() {
            return new Iterator<Rowboat>() {

                final GenericColumn timestamps = input.getColumn(Column.TIME_COLUMN_NAME).getGenericColumn();

                final Closeable[] metrics;

                final Closeable[] columns;

                final Closer closer = Closer.create();

                final int numMetrics = getMetricNames().size();

                final DimensionHandler[] handlers = new DimensionHandler[availableDimensions.size()];

                Collection<DimensionHandler> handlerSet = input.getDimensionHandlers().values();

                int currRow = 0;

                boolean done = false;

                {
                    // Instance initializer: open and register every column with
                    // the closer so hasNext() can release them all at the end.
                    closer.register(timestamps);
                    handlerSet.toArray(handlers);
                    this.columns = FluentIterable.from(handlerSet).transform(new Function<DimensionHandler, Closeable>() {

                        @Override
                        public Closeable apply(DimensionHandler handler) {
                            Column column = input.getColumn(handler.getDimensionName());
                            return handler.getSubColumn(column);
                        }
                    }).toArray(Closeable.class);
                    for (Closeable column : columns) {
                        closer.register(column);
                    }
                    final Indexed<String> availableMetrics = getMetricNames();
                    metrics = new Closeable[availableMetrics.size()];
                    for (int i = 0; i < metrics.length; ++i) {
                        final Column column = input.getColumn(availableMetrics.get(i));
                        final ValueType type = column.getCapabilities().getType();
                        switch(type) {
                            case FLOAT:
                            case LONG:
                                metrics[i] = column.getGenericColumn();
                                break;
                            case COMPLEX:
                                metrics[i] = column.getComplexColumn();
                                break;
                            default:
                                throw new ISE("Cannot handle type[%s]", type);
                        }
                    }
                    for (Closeable metricColumn : metrics) {
                        closer.register(metricColumn);
                    }
                }

                @Override
                public boolean hasNext() {
                    final boolean hasNext = currRow < numRows;
                    if (!hasNext && !done) {
                        CloseQuietly.close(closer);
                        done = true;
                    }
                    return hasNext;
                }

                @Override
                public Rowboat next() {
                    if (!hasNext()) {
                        throw new NoSuchElementException();
                    }
                    final Object[] dims = new Object[columns.length];
                    int dimIndex = 0;
                    for (final Closeable column : columns) {
                        dims[dimIndex] = handlers[dimIndex].getEncodedKeyComponentFromColumn(column, currRow);
                        dimIndex++;
                    }
                    Object[] metricArray = new Object[numMetrics];
                    for (int i = 0; i < metricArray.length; ++i) {
                        if (metrics[i] instanceof IndexedFloatsGenericColumn) {
                            metricArray[i] = ((GenericColumn) metrics[i]).getFloatSingleValueRow(currRow);
                        } else if (metrics[i] instanceof IndexedLongsGenericColumn) {
                            metricArray[i] = ((GenericColumn) metrics[i]).getLongSingleValueRow(currRow);
                        } else if (metrics[i] instanceof ComplexColumn) {
                            metricArray[i] = ((ComplexColumn) metrics[i]).getRowValue(currRow);
                        }
                    }
                    final Rowboat retVal = new Rowboat(timestamps.getLongSingleValueRow(currRow), dims, metricArray, currRow, handlers);
                    ++currRow;
                    return retVal;
                }

                @Override
                public void remove() {
                    throw new UnsupportedOperationException();
                }
            };
        }
    };
}
Also used: Closer (com.google.common.io.Closer), IndexedLongsGenericColumn (io.druid.segment.column.IndexedLongsGenericColumn), GenericColumn (io.druid.segment.column.GenericColumn), IndexedFloatsGenericColumn (io.druid.segment.column.IndexedFloatsGenericColumn), IndexedIterable (io.druid.segment.data.IndexedIterable), FluentIterable (com.google.common.collect.FluentIterable), ValueType (io.druid.segment.column.ValueType), Closeable (java.io.Closeable), ComplexColumn (io.druid.segment.column.ComplexColumn), Column (io.druid.segment.column.Column), DictionaryEncodedColumn (io.druid.segment.column.DictionaryEncodedColumn), Iterator (java.util.Iterator), Collection (java.util.Collection), ISE (io.druid.java.util.common.ISE), NoSuchElementException (java.util.NoSuchElementException)
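
Here the Closer's lifetime is tied to an iterator rather than a method: every column opened for the scan is registered up front, and hasNext() closes them all once the last row has been produced. A stripped-down sketch of an iterator that owns its resources the same way; the row source is simulated, and only the close-on-exhaustion mechanics are the point:

import com.google.common.io.Closer;

import java.io.Closeable;
import java.io.IOException;
import java.util.Iterator;
import java.util.NoSuchElementException;

// An iterator that releases all of its backing resources once exhausted,
// mirroring the getRows() pattern above.
public class ClosingIterator implements Iterator<String> {

    private final Closer closer = Closer.create();
    private final int numRows;
    private int currRow = 0;
    private boolean done = false;

    public ClosingIterator(int numRows, Closeable... resources) {
        this.numRows = numRows;
        for (Closeable resource : resources) {
            closer.register(resource);
        }
    }

    @Override
    public boolean hasNext() {
        final boolean hasNext = currRow < numRows;
        if (!hasNext && !done) {
            try {
                closer.close(); // release every registered resource exactly once
            } catch (IOException e) {
                // Swallowed here, mirroring CloseQuietly in the example above.
            }
            done = true;
        }
        return hasNext;
    }

    @Override
    public String next() {
        if (!hasNext()) {
            throw new NoSuchElementException();
        }
        return "row-" + currRow++;
    }
}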

Example 4 with Closer

Use of com.google.common.io.Closer in project jmxtrans by jmxtrans.

The class Slf4JOutputWriter, method logValue():

private void logValue(Server server, Query query, List<String> typeNames, Result result, Map.Entry<String, Object> values) throws IOException {
    Object value = values.getValue();
    if (value != null && isNumeric(value)) {
        Closer closer = Closer.create();
        try {
            // Each MDC.putCloseable sets a logging-context key and returns a
            // Closeable that removes it, so the keys are cleared when the
            // closer closes.
            closer.register(MDC.putCloseable("server", computeAlias(server)));
            closer.register(MDC.putCloseable("metric", KeyUtils.getKeyString(server, query, result, values, typeNames, null)));
            closer.register(MDC.putCloseable("value", value.toString()));
            if (result.getKeyAlias() != null) {
                closer.register(MDC.putCloseable("resultAlias", result.getKeyAlias()));
            }
            closer.register(MDC.putCloseable("attributeName", result.getAttributeName()));
            closer.register(MDC.putCloseable("key", values.getKey()));
            closer.register(MDC.putCloseable("epoch", valueOf(result.getEpoch())));
            logger.info("");
        } catch (Throwable t) {
            throw closer.rethrow(t);
        } finally {
            closer.close();
        }
    }
}
Also used: Closer (com.google.common.io.Closer)
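
This example uses Closer for something that is not I/O at all: MDC.putCloseable installs a logging-context key and returns an MDC.MDCCloseable that removes it on close, so registering each one with the Closer guarantees the context is cleared even if building a later value throws. A minimal sketch of the same scoping trick, with hypothetical key values:

import com.google.common.io.Closer;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.MDC;

public class MdcScope {

    private static final Logger logger = LoggerFactory.getLogger(MdcScope.class);

    public static void main(String[] args) throws Exception {
        Closer closer = Closer.create();
        try {
            // Each putCloseable installs a key and returns an MDC.MDCCloseable
            // that removes it on close. Key names and values are hypothetical.
            closer.register(MDC.putCloseable("server", "web-01"));
            closer.register(MDC.putCloseable("metric", "heap.used"));
            logger.info("metric sample"); // a pattern like %X{server} %X{metric} picks these up
        } catch (Throwable t) {
            throw closer.rethrow(t);
        } finally {
            closer.close(); // both MDC keys are removed here
        }
    }
}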

Example 5 with Closer

Use of com.google.common.io.Closer in project druid by druid-io.

The class Announcer, method stop():

@LifecycleStop
public void stop() {
    synchronized (toAnnounce) {
        if (!started) {
            return;
        }
        started = false;
        Closer closer = Closer.create();
        for (PathChildrenCache cache : listeners.values()) {
            closer.register(cache);
        }
        try {
            // Close all listener caches, ignoring failures so shutdown continues.
            CloseQuietly.close(closer);
        } finally {
            pathChildrenCacheExecutor.shutdown();
        }
        for (Map.Entry<String, ConcurrentMap<String, byte[]>> entry : announcements.entrySet()) {
            String basePath = entry.getKey();
            for (String announcementPath : entry.getValue().keySet()) {
                unannounce(ZKPaths.makePath(basePath, announcementPath));
            }
        }
        if (!parentsIBuilt.isEmpty()) {
            CuratorTransaction transaction = curator.inTransaction();
            for (String parent : parentsIBuilt) {
                try {
                    transaction = transaction.delete().forPath(parent).and();
                } catch (Exception e) {
                    log.info(e, "Unable to delete parent[%s], boooo.", parent);
                }
            }
            try {
                ((CuratorTransactionFinal) transaction).commit();
            } catch (Exception e) {
                log.info(e, "Unable to commit transaction. Please feed the hamsters");
            }
        }
    }
}
Also used: Closer (com.google.common.io.Closer), PathChildrenCache (org.apache.curator.framework.recipes.cache.PathChildrenCache), CuratorTransaction (org.apache.curator.framework.api.transaction.CuratorTransaction), CuratorTransactionFinal (org.apache.curator.framework.api.transaction.CuratorTransactionFinal), ConcurrentMap (java.util.concurrent.ConcurrentMap), Map (java.util.Map), KeeperException (org.apache.zookeeper.KeeperException), LifecycleStop (io.druid.java.util.common.lifecycle.LifecycleStop)
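
Unlike Examples 1, 2, and 4, this stop() deliberately swallows close failures (CloseQuietly is a Druid utility) so that shutdown always proceeds to the executor and ZooKeeper cleanup. A sketch of that "never let close abort shutdown" shape, with a local helper standing in for Druid's CloseQuietly:

import com.google.common.io.Closer;

import java.io.Closeable;
import java.io.IOException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class QuietStop {

    // Local stand-in for Druid's CloseQuietly: swallow-and-continue semantics.
    static void closeQuietly(Closeable closeable) {
        try {
            closeable.close();
        } catch (IOException e) {
            // Deliberately ignored: shutdown must keep going.
        }
    }

    public static void main(String[] args) {
        ExecutorService executor = Executors.newSingleThreadExecutor();
        Closer closer = Closer.create();
        closer.register(() -> System.out.println("listener cache closed"));
        try {
            closeQuietly(closer);
        } finally {
            executor.shutdown(); // runs whether or not closing succeeded
        }
    }
}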

Aggregations

Closer (com.google.common.io.Closer): 200
IOException (java.io.IOException): 94
File (java.io.File): 22
Test (org.junit.Test): 18
Test (org.testng.annotations.Test): 18
Closer (org.apache.flink.shaded.guava30.com.google.common.io.Closer): 16
Path (org.apache.hadoop.fs.Path): 16
ArrayList (java.util.ArrayList): 15
Properties (java.util.Properties): 14
FileOutputStream (java.io.FileOutputStream): 13
FileInputStream (java.io.FileInputStream): 12
InputStream (java.io.InputStream): 12
Map (java.util.Map): 12
OutputStream (java.io.OutputStream): 11
UncheckedIOException (java.io.UncheckedIOException): 10
NettyShuffleEnvironment (org.apache.flink.runtime.io.network.NettyShuffleEnvironment): 9
WorkUnit (org.apache.gobblin.source.workunit.WorkUnit): 9
AlluxioException (alluxio.exception.AlluxioException): 8
InputStreamReader (java.io.InputStreamReader): 8
HashMap (java.util.HashMap): 8