
Example 36 with PipedOutputStream

use of java.io.PipedOutputStream in project dhis2-core by dhis2.

the class DefaultAdxDataService method saveDataValueSetInternal.

private ImportSummary saveDataValueSetInternal(InputStream in, ImportOptions importOptions, TaskId id) {
    notifier.clear(id).notify(id, "ADX parsing process started");
    ImportOptions adxImportOptions = ObjectUtils.firstNonNull(importOptions, ImportOptions.getDefaultImportOptions()).instance().setNotificationLevel(NotificationLevel.OFF);
    // Resolve identifier schemes from the null-safe import options
    IdScheme dataSetIdScheme = adxImportOptions.getIdSchemes().getDataSetIdScheme();
    IdScheme dataElementIdScheme = adxImportOptions.getIdSchemes().getDataElementIdScheme();
    // Create meta-data maps
    CachingMap<String, DataSet> dataSetMap = new CachingMap<>();
    CachingMap<String, DataElement> dataElementMap = new CachingMap<>();
    // Get meta-data maps
    IdentifiableObjectCallable<DataSet> dataSetCallable = new IdentifiableObjectCallable<>(identifiableObjectManager, DataSet.class, dataSetIdScheme, null);
    IdentifiableObjectCallable<DataElement> dataElementCallable = new IdentifiableObjectCallable<>(identifiableObjectManager, DataElement.class, dataElementIdScheme, null);
    // Preheat the metadata caches if requested
    if (adxImportOptions.isPreheatCacheDefaultFalse()) {
        dataSetMap.load(identifiableObjectManager.getAll(DataSet.class), o -> o.getPropertyValue(dataSetIdScheme));
        dataElementMap.load(identifiableObjectManager.getAll(DataElement.class), o -> o.getPropertyValue(dataElementIdScheme));
    }
    XMLReader adxReader = XMLFactory.getXMLReader(in);
    ImportSummary importSummary;
    adxReader.moveToStartElement(AdxDataService.ROOT, AdxDataService.NAMESPACE);
    ExecutorService executor = Executors.newSingleThreadExecutor();
    // Give the DXF import a different notification task ID so it doesn't conflict with notifications from this level.
    TaskId dxfTaskId = new TaskId(TaskCategory.DATAVALUE_IMPORT_INTERNAL, id.getUser());
    int groupCount = 0;
    try (PipedOutputStream pipeOut = new PipedOutputStream()) {
        Future<ImportSummary> futureImportSummary = executor.submit(new AdxPipedImporter(dataValueSetService, adxImportOptions, dxfTaskId, pipeOut, sessionFactory));
        XMLOutputFactory factory = XMLOutputFactory.newInstance();
        XMLStreamWriter dxfWriter = factory.createXMLStreamWriter(pipeOut);
        List<ImportConflict> adxConflicts = new LinkedList<>();
        dxfWriter.writeStartDocument("1.0");
        dxfWriter.writeStartElement("dataValueSet");
        dxfWriter.writeDefaultNamespace("http://dhis2.org/schema/dxf/2.0");
        notifier.notify(id, "Starting to import ADX data groups.");
        while (adxReader.moveToStartElement(AdxDataService.GROUP, AdxDataService.NAMESPACE)) {
            notifier.update(id, "Importing ADX data group: " + groupCount);
            // note this returns conflicts which are detected at ADX level
            adxConflicts.addAll(parseAdxGroupToDxf(adxReader, dxfWriter, adxImportOptions, dataSetMap, dataSetCallable, dataElementMap, dataElementCallable));
            groupCount++;
        }
        // end dataValueSet
        dxfWriter.writeEndElement();
        dxfWriter.writeEndDocument();
        pipeOut.flush();
        importSummary = futureImportSummary.get(TOTAL_MINUTES_TO_WAIT, TimeUnit.MINUTES);
        importSummary.getConflicts().addAll(adxConflicts);
        importSummary.getImportCount().incrementIgnored(adxConflicts.size());
    } catch (AdxException ex) {
        importSummary = new ImportSummary();
        importSummary.setStatus(ImportStatus.ERROR);
        importSummary.setDescription("Data set import failed within group number: " + groupCount);
        importSummary.getConflicts().add(ex.getImportConflict());
        notifier.update(id, NotificationLevel.ERROR, "ADX data import done", true);
        log.warn("Import failed: " + DebugUtils.getStackTrace(ex));
    } catch (IOException | XMLStreamException | InterruptedException | ExecutionException | TimeoutException ex) {
        importSummary = new ImportSummary();
        importSummary.setStatus(ImportStatus.ERROR);
        importSummary.setDescription("Data set import failed within group number: " + groupCount);
        notifier.update(id, NotificationLevel.ERROR, "ADX data import done", true);
        log.warn("Import failed: " + DebugUtils.getStackTrace(ex));
    }
    executor.shutdown();
    notifier.update(id, INFO, "ADX data import done", true).addTaskSummary(id, importSummary);
    ImportCount c = importSummary.getImportCount();
    log.info("ADX data import done, imported: " + c.getImported() + ", updated: " + c.getUpdated() + ", deleted: " + c.getDeleted() + ", ignored: " + c.getIgnored());
    return importSummary;
}
Also used : XMLOutputFactory(javax.xml.stream.XMLOutputFactory) TaskId(org.hisp.dhis.scheduling.TaskId) DataSet(org.hisp.dhis.dataset.DataSet) ImportSummary(org.hisp.dhis.dxf2.importsummary.ImportSummary) PipedOutputStream(java.io.PipedOutputStream) DataElement(org.hisp.dhis.dataelement.DataElement) CachingMap(org.hisp.dhis.commons.collection.CachingMap) XMLStreamWriter(javax.xml.stream.XMLStreamWriter) ExecutionException(java.util.concurrent.ExecutionException) XMLReader(org.hisp.staxwax.reader.XMLReader) ImportConflict(org.hisp.dhis.dxf2.importsummary.ImportConflict) TimeoutException(java.util.concurrent.TimeoutException) ImportCount(org.hisp.dhis.dxf2.importsummary.ImportCount) IdScheme(org.hisp.dhis.common.IdScheme) IOException(java.io.IOException) IdentifiableObjectCallable(org.hisp.dhis.system.callable.IdentifiableObjectCallable) LinkedList(java.util.LinkedList) XMLStreamException(javax.xml.stream.XMLStreamException) ExecutorService(java.util.concurrent.ExecutorService) ImportOptions(org.hisp.dhis.dxf2.common.ImportOptions)
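
The DHIS2 method above couples a streaming XML writer on the current thread to a background importer through a pipe, using an ExecutorService and a timed Future.get(). Below is a minimal, self-contained sketch of that producer/consumer pattern; the class name PipedImportSketch and the line-counting consumer are hypothetical stand-ins for AdxPipedImporter and the DXF import.

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

public class PipedImportSketch {

    public static void main(String[] args) throws Exception {
        ExecutorService executor = Executors.newSingleThreadExecutor();
        try (PipedOutputStream pipeOut = new PipedOutputStream()) {
            // Submit the consumer before producing, otherwise the producer blocks
            // as soon as the pipe's internal buffer fills up.
            PipedInputStream pipeIn = new PipedInputStream(pipeOut);
            Future<Integer> futureCount = executor.submit(() -> {
                int lines = 0;
                try (BufferedReader reader = new BufferedReader(
                        new InputStreamReader(pipeIn, StandardCharsets.UTF_8))) {
                    while (reader.readLine() != null) {
                        lines++;
                    }
                }
                return lines;
            });
            // Producer: write on the current thread, then close the writer (and with
            // it the pipe) so the consumer sees end-of-stream and the Future completes.
            try (Writer writer = new OutputStreamWriter(pipeOut, StandardCharsets.UTF_8)) {
                for (int group = 0; group < 3; group++) {
                    writer.write("group " + group + "\n");
                }
            }
            System.out.println("groups read: " + futureCount.get(1, TimeUnit.MINUTES));
        } finally {
            executor.shutdown();
        }
    }
}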

Example 37 with PipedOutputStream

use of java.io.PipedOutputStream in project accumulo by apache.

the class PasswordConverterTest method setup.

@Before
public void setup() throws IOException {
    argv = new String[] { "--password", "" };
    password = new Password();
    PipedInputStream in = new PipedInputStream();
    PipedOutputStream out = new PipedOutputStream(in);
    OutputStreamWriter osw = new OutputStreamWriter(out);
    osw.write("secret");
    osw.close();
    System.setIn(in);
}
Also used : PipedOutputStream(java.io.PipedOutputStream) OutputStreamWriter(java.io.OutputStreamWriter) PipedInputStream(java.io.PipedInputStream) Before(org.junit.Before)
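
In this test, a pipe pre-loads System.in with the text a later read would expect from the console. Here is a minimal sketch of the same trick, with a hypothetical Scanner-based consumer standing in for the JCommander Password converter.

import java.io.InputStream;
import java.io.OutputStreamWriter;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.util.Scanner;

public class StdinPipeSketch {

    public static void main(String[] args) throws Exception {
        InputStream originalIn = System.in;
        PipedInputStream in = new PipedInputStream();
        // Pre-load the fake console input; closing the writer closes the pipe's
        // output end so readers of System.in later see end-of-stream.
        try (Writer writer = new OutputStreamWriter(new PipedOutputStream(in), StandardCharsets.UTF_8)) {
            writer.write("secret\n");
        }
        System.setIn(in);
        try (Scanner scanner = new Scanner(System.in, StandardCharsets.UTF_8.name())) {
            System.out.println("read from stdin: " + scanner.nextLine());
        } finally {
            // Restore the real stdin so later code (or other tests) is unaffected.
            System.setIn(originalIn);
        }
    }
}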

Example 38 with PipedOutputStream

use of java.io.PipedOutputStream in project smarthome by eclipse.

the class JSONResponse method createResponse.

private Response createResponse(Status status, Object entity) {
    ResponseBuilder rp = responseBuilder(status);
    if (entity == null) {
        return rp.build();
    }
    // The PipedOutputStream will only be closed by the writing thread
    // since closing it during this method call would be too early.
    // The receiver of the response will read from the pipe after this method returns.
    PipedOutputStream out = new PipedOutputStream();
    try {
        // we will not actively close the PipedInputStream since it is read by the receiving end
        // and will be GC'ed once the response is consumed.
        PipedInputStream in = new PipedInputStream(out);
        rp.entity(in);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
    Thread writerThread = new Thread(() -> {
        try (JsonWriter jsonWriter = new JsonWriter(new BufferedWriter(new OutputStreamWriter(out)))) {
            if (entity != null) {
                gson.toJson(entity, entity.getClass(), jsonWriter);
                jsonWriter.flush();
            }
        } catch (IOException | JsonIOException e) {
            logger.error("Error streaming JSON through PipedInpuStream/PipedOutputStream: ", e);
        }
    });
    // Daemonize the thread so the JVM can shut down even while JSON is still being streamed.
    writerThread.setDaemon(true);
    writerThread.start();
    return rp.build();
}
Also used : JsonIOException(com.google.gson.JsonIOException) PipedOutputStream(java.io.PipedOutputStream) OutputStreamWriter(java.io.OutputStreamWriter) PipedInputStream(java.io.PipedInputStream) IOException(java.io.IOException) JsonIOException(com.google.gson.JsonIOException) ResponseBuilder(javax.ws.rs.core.Response.ResponseBuilder) JsonWriter(com.google.gson.stream.JsonWriter) BufferedWriter(java.io.BufferedWriter)
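
The pattern here is to hand the receiving side a PipedInputStream and let a daemon thread fill the matching PipedOutputStream, so a large entity is streamed rather than built in memory. Below is a minimal JDK-only sketch of the same idea; the method name streamEntity and the plain-string payload are stand-ins for the Gson/JAX-RS specifics.

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import java.io.Writer;
import java.nio.charset.StandardCharsets;

public class StreamedEntitySketch {

    // Returns an InputStream the caller (e.g. an HTTP layer) can consume while a
    // daemon thread produces the content on the fly.
    static InputStream streamEntity(String entity) throws IOException {
        PipedOutputStream out = new PipedOutputStream();
        PipedInputStream in = new PipedInputStream(out);
        Thread writerThread = new Thread(() -> {
            // Closing the writer also closes the PipedOutputStream, which is what
            // signals end-of-stream to the reading side.
            try (Writer writer = new BufferedWriter(new OutputStreamWriter(out, StandardCharsets.UTF_8))) {
                writer.write(entity);
            } catch (IOException e) {
                e.printStackTrace();
            }
        });
        // Daemon thread: streaming must not keep the JVM alive on shutdown.
        writerThread.setDaemon(true);
        writerThread.start();
        return in;
    }

    public static void main(String[] args) throws Exception {
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(streamEntity("{\"status\":\"ok\"}"), StandardCharsets.UTF_8))) {
            System.out.println(reader.readLine());
        }
    }
}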

Example 39 with PipedOutputStream

use of java.io.PipedOutputStream in project data-prep by Talend.

the class DataSetService method updateRawDataSet.

/**
 * Updates a data set's content and metadata. If no data set exists for the given id, the data set is silently created.
 *
 * @param dataSetId The id of data set to be updated.
 * @param name The new name for the data set. Empty name (or <code>null</code>) does not update dataset name.
 * @param dataSetContent The new content for the data set. If empty, existing content will <b>not</b> be replaced.
 * For delete operation, look at {@link #delete(String)}.
 */
@RequestMapping(value = "/datasets/{id}/raw", method = PUT)
@ApiOperation(value = "Update a data set by id", notes = "Update a data set content based on provided id and PUT body. Id should be a UUID returned by the list operation. Not valid or non existing data set id returns empty content. For documentation purposes, body is typed as 'text/plain' but operation accepts binary content too.")
@Timed
@VolumeMetered
public String updateRawDataSet(// 
@PathVariable(value = "id") @ApiParam(name = "id", value = "Id of the data set to update") String dataSetId, // 
@RequestParam(value = "name", required = false) @ApiParam(name = "name", value = "New value for the data set name") String name, // 
@RequestParam(value = "size", required = false) @ApiParam(name = "size", value = "The size of the dataSet") Long size, @ApiParam(value = "content") InputStream dataSetContent) {
    LOG.debug("updating dataset content #{}", dataSetId);
    if (name != null) {
        checkDataSetName(name);
    }
    DataSetMetadata currentDataSetMetadata = dataSetMetadataRepository.get(dataSetId);
    if (currentDataSetMetadata == null) {
        return create(name, null, size, TEXT_PLAIN_VALUE, dataSetContent);
    } else {
        // just like the creation, let's make sure invalid size forbids dataset creation
        if (size != null && size < 0) {
            LOG.warn("invalid size provided {}", size);
            throw new TDPException(UNSUPPORTED_CONTENT);
        }
        final UpdateDataSetCacheKey cacheKey = new UpdateDataSetCacheKey(currentDataSetMetadata.getId());
        final DistributedLock lock = dataSetMetadataRepository.createDatasetMetadataLock(currentDataSetMetadata.getId());
        try {
            lock.lock();
            // check the size if it's available (quick win)
            if (size != null && size > 0) {
                quotaService.checkIfAddingSizeExceedsAvailableStorage(Math.abs(size - currentDataSetMetadata.getDataSetSize()));
            }
            final DataSetMetadataBuilder datasetBuilder = metadataBuilder.metadata().id(currentDataSetMetadata.getId());
            datasetBuilder.copyNonContentRelated(currentDataSetMetadata);
            datasetBuilder.modified(System.currentTimeMillis());
            if (!StringUtils.isEmpty(name)) {
                datasetBuilder.name(name);
            }
            final DataSetMetadata updatedDataSetMetadata = datasetBuilder.build();
            // Save data set content into cache to make sure there's enough space in the content store
            final long maxDataSetSizeAllowed = getMaxDataSetSizeAllowed();
            final StrictlyBoundedInputStream sizeCalculator = new StrictlyBoundedInputStream(dataSetContent, maxDataSetSizeAllowed);
            try (OutputStream cacheEntry = cacheManager.put(cacheKey, TimeToLive.DEFAULT)) {
                IOUtils.copy(sizeCalculator, cacheEntry);
            }
            // once fully copied to the cache, we know for sure that the content store has enough space, so let's copy
            // from the cache to the content store
            PipedInputStream toContentStore = new PipedInputStream();
            PipedOutputStream fromCache = new PipedOutputStream(toContentStore);
            Runnable r = () -> {
                try (final InputStream input = cacheManager.get(cacheKey)) {
                    IOUtils.copy(input, fromCache);
                    // it's important to close this stream, otherwise the piped stream will never close
                    fromCache.close();
                } catch (IOException e) {
                    throw new TDPException(UNABLE_TO_CREATE_OR_UPDATE_DATASET, e);
                }
            };
            executor.execute(r);
            contentStore.storeAsRaw(updatedDataSetMetadata, toContentStore);
            // update the dataset metadata with its new size
            updatedDataSetMetadata.setDataSetSize(sizeCalculator.getTotal());
            dataSetMetadataRepository.save(updatedDataSetMetadata);
            // publishing update event
            publisher.publishEvent(new DatasetUpdatedEvent(updatedDataSetMetadata));
        } catch (StrictlyBoundedInputStream.InputStreamTooLargeException e) {
            LOG.warn("Dataset update {} cannot be done, new content is too big", currentDataSetMetadata.getId());
            throw new TDPException(MAX_STORAGE_MAY_BE_EXCEEDED, e, build().put("limit", e.getMaxSize()));
        } catch (IOException e) {
            LOG.error("Error updating the dataset", e);
            throw new TDPException(UNABLE_TO_CREATE_OR_UPDATE_DATASET, e);
        } finally {
            dataSetContentToNull(dataSetContent);
            // whatever the outcome the cache needs to be cleaned
            if (cacheManager.has(cacheKey)) {
                cacheManager.evict(cacheKey);
            }
            lock.unlock();
        }
        // Content was changed, so queue events (format analysis, content indexing for search...)
        analyzeDataSet(currentDataSetMetadata.getId(), true, emptyList());
        return currentDataSetMetadata.getId();
    }
}
Also used : DataSetMetadataBuilder(org.talend.dataprep.dataset.DataSetMetadataBuilder) PipedInputStream(java.io.PipedInputStream) StrictlyBoundedInputStream(org.talend.dataprep.dataset.store.content.StrictlyBoundedInputStream) InputStream(java.io.InputStream) PipedOutputStream(java.io.PipedOutputStream) NullOutputStream(org.apache.commons.io.output.NullOutputStream) OutputStream(java.io.OutputStream) PipedOutputStream(java.io.PipedOutputStream) PipedInputStream(java.io.PipedInputStream) IOException(java.io.IOException) DataSetMetadata(org.talend.dataprep.api.dataset.DataSetMetadata) TDPException(org.talend.dataprep.exception.TDPException) DistributedLock(org.talend.dataprep.lock.DistributedLock) StrictlyBoundedInputStream(org.talend.dataprep.dataset.store.content.StrictlyBoundedInputStream) DatasetUpdatedEvent(org.talend.dataprep.dataset.event.DatasetUpdatedEvent) UpdateDataSetCacheKey(org.talend.dataprep.dataset.service.cache.UpdateDataSetCacheKey) VolumeMetered(org.talend.dataprep.metrics.VolumeMetered) Timed(org.talend.dataprep.metrics.Timed) ApiOperation(io.swagger.annotations.ApiOperation) RequestMapping(org.springframework.web.bind.annotation.RequestMapping)
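
The update above copies the cached content into the content store through a pipe: an executor pushes bytes into the PipedOutputStream while the store consumes the PipedInputStream, and closing the output side is what lets the consumer finish. Here is a minimal sketch of that bridge using only the JDK; InputStream.transferTo (JDK 9+) stands in for IOUtils.copy, and an in-memory byte array stands in for the content cache.

import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import java.nio.charset.StandardCharsets;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class CacheToStoreSketch {

    public static void main(String[] args) throws Exception {
        ExecutorService executor = Executors.newSingleThreadExecutor();
        // Stand-in for the cached data set content.
        InputStream cachedContent =
                new ByteArrayInputStream("id;name\n1;Alice\n".getBytes(StandardCharsets.UTF_8));

        PipedInputStream toContentStore = new PipedInputStream();
        PipedOutputStream fromCache = new PipedOutputStream(toContentStore);

        // Background copy: drain the cache into the pipe. Closing fromCache is
        // essential, otherwise the reading side below would block forever.
        executor.execute(() -> {
            try (InputStream input = cachedContent; PipedOutputStream out = fromCache) {
                input.transferTo(out);
            } catch (Exception e) {
                e.printStackTrace();
            }
        });

        // "Content store" side: consume the pipe on the current thread.
        try (InputStream in = toContentStore) {
            System.out.print(new String(in.readAllBytes(), StandardCharsets.UTF_8));
        }
        executor.shutdown();
    }
}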

Example 40 with PipedOutputStream

use of java.io.PipedOutputStream in project org.csstudio.display.builder by kasemir.

the class WorkspaceResourceHelperImpl method writeWorkspaceResource.

@Override
public OutputStream writeWorkspaceResource(final String resource_name) throws Exception {
    final IWorkspaceRoot root = ResourcesPlugin.getWorkspace().getRoot();
    final IFile file = root.getFile(new Path(resource_name));
    // IFile API requires an InputStream for the content.
    // That content, however, doesn't exist at this time, because
    // it's about to be written to an OutputStream by the caller
    // of this function.
    // -> Provide pipe, with background job to read from pipe and write the file
    final PipedOutputStream buf = new PipedOutputStream();
    final PipedInputStream input = new PipedInputStream(buf);
    final IJobFunction writer = monitor -> {
        try {
            if (file.exists())
                file.setContents(input, true, false, monitor);
            else
                file.create(input, true, monitor);
        } catch (Exception ex) {
            logger.log(Level.WARNING, "Cannot write to " + resource_name, ex);
        }
        return Status.OK_STATUS;
    };
    Job.create("Workspace Writer", writer).schedule();
    // Provide caller with output end of pipe to fill
    return buf;
}
Also used : Path(org.eclipse.core.runtime.Path) OutputStream(java.io.OutputStream) ResourcesPlugin(org.eclipse.core.resources.ResourcesPlugin) Job(org.eclipse.core.runtime.jobs.Job) Status(org.eclipse.core.runtime.Status) ModelPlugin.logger(org.csstudio.display.builder.model.ModelPlugin.logger) PipedOutputStream(java.io.PipedOutputStream) Level(java.util.logging.Level) IWorkspaceRoot(org.eclipse.core.resources.IWorkspaceRoot) IJobFunction(org.eclipse.core.runtime.jobs.IJobFunction) PipedInputStream(java.io.PipedInputStream) Path(org.eclipse.core.runtime.Path) IFile(org.eclipse.core.resources.IFile) InputStream(java.io.InputStream) URIUtil(org.eclipse.core.filesystem.URIUtil) IFile(org.eclipse.core.resources.IFile) IWorkspaceRoot(org.eclipse.core.resources.IWorkspaceRoot) IJobFunction(org.eclipse.core.runtime.jobs.IJobFunction) PipedOutputStream(java.io.PipedOutputStream) PipedInputStream(java.io.PipedInputStream)
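
As in the workspace helper above, the output end of a pipe can be handed to the caller while a background worker drains the input end and persists it. Below is a minimal sketch with a plain temp file and a Thread standing in for the Eclipse IFile and Job APIs; the class and file names are hypothetical.

import java.io.IOException;
import java.io.OutputStream;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;

public class PipeBackedWriterSketch {

    public static void main(String[] args) throws Exception {
        Path target = Files.createTempFile("resource", ".txt");

        PipedOutputStream buf = new PipedOutputStream();
        PipedInputStream input = new PipedInputStream(buf);

        // Background writer: drains the pipe into the target file, mirroring the
        // workspace Job in the example above.
        Thread writer = new Thread(() -> {
            try (PipedInputStream in = input) {
                Files.copy(in, target, StandardCopyOption.REPLACE_EXISTING);
            } catch (IOException ex) {
                ex.printStackTrace();
            }
        }, "resource-writer");
        writer.start();

        // The caller fills the output end of the pipe; closing it signals end-of-stream.
        try (OutputStream out = buf) {
            out.write("hello workspace".getBytes(StandardCharsets.UTF_8));
        }

        writer.join(); // wait until the file is fully written before reading it back
        System.out.println(Files.readString(target));
    }
}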

Aggregations

PipedOutputStream (java.io.PipedOutputStream): 227
PipedInputStream (java.io.PipedInputStream): 204
IOException (java.io.IOException): 91
Test (org.junit.Test): 55
InputStream (java.io.InputStream): 28
OutputStream (java.io.OutputStream): 24
BinaryDecoder (co.cask.cdap.common.io.BinaryDecoder): 21
BinaryEncoder (co.cask.cdap.common.io.BinaryEncoder): 21
PrintStream (java.io.PrintStream): 21
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 19
ReflectionDatumReader (co.cask.cdap.internal.io.ReflectionDatumReader): 17
TypeToken (com.google.common.reflect.TypeToken): 17
InputStreamReader (java.io.InputStreamReader): 16
DataInputStream (java.io.DataInputStream): 14
DataOutputStream (java.io.DataOutputStream): 14
BufferedReader (java.io.BufferedReader): 13
Before (org.junit.Before): 12
ByteArrayInputStream (java.io.ByteArrayInputStream): 10
ExecutorService (java.util.concurrent.ExecutorService): 8
ArrayList (java.util.ArrayList): 7