Example 31 with UncheckedIOException

Use of java.io.UncheckedIOException in project gocd by gocd.

The class GoDashboardPipelineGroup, method etag():

public String etag() {
    try {
        // Stream the identifying fields through a SHA-256 DigestOutputStream;
        // NullOutputStream discards the bytes, so only the digest is retained.
        MessageDigest digest = DigestUtils.getSha256Digest();
        OutputStreamWriter outputStreamWriter = new OutputStreamWriter(new DigestOutputStream(new NullOutputStream(), digest));
        outputStreamWriter.write(name);
        outputStreamWriter.write("/");
        outputStreamWriter.write(Integer.toString(permissions.hashCode()));
        outputStreamWriter.write("[");
        for (Map.Entry<String, GoDashboardPipeline> entry : pipelines.entrySet()) {
            long lastUpdatedTimeStamp = entry.getValue().getLastUpdatedTimeStamp();
            outputStreamWriter.write(entry.getKey());
            outputStreamWriter.write(":");
            outputStreamWriter.write(Long.toString(lastUpdatedTimeStamp));
        }
        outputStreamWriter.write("]");
        outputStreamWriter.flush();
        return Hex.encodeHexString(digest.digest());
    } catch (IOException e) {
        throw new UncheckedIOException(e);
    }
}
Also used : DigestOutputStream(java.security.DigestOutputStream) OutputStreamWriter(java.io.OutputStreamWriter) UncheckedIOException(java.io.UncheckedIOException) IOException(java.io.IOException) MessageDigest(java.security.MessageDigest) LinkedHashMap(java.util.LinkedHashMap) Map(java.util.Map) NullOutputStream(org.apache.commons.io.output.NullOutputStream)
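
Example 31 leans on Apache Commons helpers (DigestUtils, NullOutputStream, Hex). For readers who want to try the idiom in isolation, the following is a minimal JDK-only sketch of the same technique: write the identifying fields through a DigestOutputStream and rethrow the practically impossible IOException as UncheckedIOException. Class, method, and argument names here are illustrative, not taken from the GoCD source; OutputStream.nullOutputStream() (Java 11+) stands in for commons-io's NullOutputStream.

import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.UncheckedIOException;
import java.nio.charset.StandardCharsets;
import java.security.DigestOutputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

public class EtagSketch {

    // Hashes the given parts with SHA-256 and returns the digest as lowercase hex.
    public static String etag(String... parts) {
        try {
            MessageDigest digest = MessageDigest.getInstance("SHA-256");
            // The written bytes are discarded; only the running digest is kept.
            OutputStreamWriter writer = new OutputStreamWriter(
                    new DigestOutputStream(OutputStream.nullOutputStream(), digest),
                    StandardCharsets.UTF_8);
            for (String part : parts) {
                writer.write(part);
                writer.write('/');
            }
            writer.flush();
            StringBuilder hex = new StringBuilder();
            for (byte b : digest.digest()) {
                hex.append(String.format("%02x", b));
            }
            return hex.toString();
        } catch (NoSuchAlgorithmException e) {
            // Every compliant JRE ships SHA-256, so this is effectively unreachable.
            throw new IllegalStateException(e);
        } catch (IOException e) {
            // Writing to an in-memory sink cannot realistically fail.
            throw new UncheckedIOException(e);
        }
    }

    public static void main(String[] args) {
        System.out.println(etag("pipeline-group", "42"));
    }
}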

Example 32 with UncheckedIOException

Use of java.io.UncheckedIOException in project pravega by pravega.

The class ContainerReadIndexTests, method testReadDirect():

/**
 * Tests the readDirect() method on the ReadIndex.
 */
@Test
public void testReadDirect() throws Exception {
    final int randomAppendLength = 1024;
    @Cleanup TestContext context = new TestContext();
    ArrayList<Long> segmentIds = new ArrayList<>();
    final long segmentId = createSegment(0, context);
    final UpdateableSegmentMetadata segmentMetadata = context.metadata.getStreamSegmentMetadata(segmentId);
    segmentIds.add(segmentId);
    HashMap<Long, ArrayList<Long>> transactionsBySegment = createTransactions(segmentIds, 1, context);
    final long mergedTxId = transactionsBySegment.get(segmentId).get(0);
    // Add data to all segments.
    HashMap<Long, ByteArrayOutputStream> segmentContents = new HashMap<>();
    transactionsBySegment.values().forEach(segmentIds::addAll);
    appendData(segmentIds, segmentContents, context);
    // Mark everything so far (minus a few bytes) as being written to storage.
    segmentMetadata.setStorageLength(segmentMetadata.getLength() - 100);
    // Now partially merge a second transaction
    final long mergedTxOffset = beginMergeTransaction(mergedTxId, segmentMetadata, segmentContents, context);
    // Add one more append after all of this.
    final long endOfMergedDataOffset = segmentMetadata.getLength();
    byte[] appendData = new byte[randomAppendLength];
    new Random(0).nextBytes(appendData);
    appendSingleWrite(segmentId, appendData, context);
    recordAppend(segmentId, appendData, segmentContents);
    // Verify we are not allowed to read from the range which has already been committed to Storage (invalid arguments).
    for (AtomicLong offset = new AtomicLong(0); offset.get() < segmentMetadata.getStorageLength(); offset.incrementAndGet()) {
        AssertExtensions.assertThrows(String.format("readDirect allowed reading from an illegal offset (%s).", offset), () -> context.readIndex.readDirect(segmentId, offset.get(), 1), ex -> ex instanceof IllegalArgumentException);
    }
    // Verify that any reads overlapping a merged transaction return null (that is, we cannot retrieve the requested data).
    for (long offset = mergedTxOffset - 1; offset < endOfMergedDataOffset; offset++) {
        InputStream resultStream = context.readIndex.readDirect(segmentId, offset, 2);
        Assert.assertNull("readDirect() returned data overlapping a partially merged transaction", resultStream);
    }
    // Verify that we can read from any other offset.
    final byte[] expectedData = segmentContents.get(segmentId).toByteArray();
    BiConsumer<Long, Long> verifyReadResult = (startOffset, endOffset) -> {
        int readLength = (int) (endOffset - startOffset);
        while (readLength > 0) {
            InputStream actualDataStream;
            try {
                actualDataStream = context.readIndex.readDirect(segmentId, startOffset, readLength);
            } catch (StreamSegmentNotExistsException ex) {
                throw new CompletionException(ex);
            }
            Assert.assertNotNull(String.format("Unexpected result when data is readily available for Offset = %s, Length = %s.", startOffset, readLength), actualDataStream);
            byte[] actualData = new byte[readLength];
            try {
                int bytesCopied = StreamHelpers.readAll(actualDataStream, actualData, 0, readLength);
                Assert.assertEquals(String.format("Unexpected number of bytes read for Offset = %s, Length = %s (pre-partial-merge).", startOffset, readLength), readLength, bytesCopied);
            } catch (IOException ex) {
                // Technically not possible.
                throw new UncheckedIOException(ex);
            }
            AssertExtensions.assertArrayEquals("Unexpected data read from the segment at offset " + startOffset, expectedData, startOffset.intValue(), actualData, 0, actualData.length);
            // Setup the read for the next test (where we read 1 less byte than now).
            readLength--;
            if (readLength % 2 == 0) {
                // For every 2 bytes of decreased read length, increase the start offset by 1. This allows for a greater
                // number of combinations to be tested.
                startOffset++;
            }
        }
    };
    // Verify that we can read the cached data just after the StorageLength but before the merged transaction.
    verifyReadResult.accept(segmentMetadata.getStorageLength(), mergedTxOffset);
    // Verify that we can read the cached data just after the merged transaction but before the end of the segment.
    verifyReadResult.accept(endOfMergedDataOffset, segmentMetadata.getLength());
}
Also used : Storage(io.pravega.segmentstore.storage.Storage) StreamSegmentNotExistsException(io.pravega.segmentstore.contracts.StreamSegmentNotExistsException) AssertExtensions(io.pravega.test.common.AssertExtensions) CacheKey(io.pravega.segmentstore.server.CacheKey) Cleanup(lombok.Cleanup) Random(java.util.Random) InMemoryCache(io.pravega.segmentstore.storage.mocks.InMemoryCache) UpdateableSegmentMetadata(io.pravega.segmentstore.server.UpdateableSegmentMetadata) StreamSegmentSealedException(io.pravega.segmentstore.contracts.StreamSegmentSealedException) ByteArrayInputStream(java.io.ByteArrayInputStream) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) ReadResultEntryContents(io.pravega.segmentstore.contracts.ReadResultEntryContents) InMemoryStorageFactory(io.pravega.segmentstore.storage.mocks.InMemoryStorageFactory) Duration(java.time.Duration) Map(java.util.Map) CacheFactory(io.pravega.segmentstore.storage.CacheFactory) Collection(java.util.Collection) CompletionException(java.util.concurrent.CompletionException) ReadResultEntryType(io.pravega.segmentstore.contracts.ReadResultEntryType) UUID(java.util.UUID) Collectors(java.util.stream.Collectors) StreamSegmentNameUtils(io.pravega.shared.segment.StreamSegmentNameUtils) UncheckedIOException(java.io.UncheckedIOException) List(java.util.List) ThreadPooledTestSuite(io.pravega.test.common.ThreadPooledTestSuite) Futures(io.pravega.common.concurrent.Futures) ReadResult(io.pravega.segmentstore.contracts.ReadResult) MetadataBuilder(io.pravega.segmentstore.server.MetadataBuilder) ByteArrayOutputStream(java.io.ByteArrayOutputStream) ConfigHelpers(io.pravega.segmentstore.server.ConfigHelpers) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) HashSet(java.util.HashSet) UpdateableContainerMetadata(io.pravega.segmentstore.server.UpdateableContainerMetadata) SegmentMetadata(io.pravega.segmentstore.server.SegmentMetadata) ReadResultEntry(io.pravega.segmentstore.contracts.ReadResultEntry) BiConsumer(java.util.function.BiConsumer) Timeout(org.junit.rules.Timeout) StreamHelpers(io.pravega.common.io.StreamHelpers) StreamSegmentTruncatedException(io.pravega.segmentstore.contracts.StreamSegmentTruncatedException) lombok.val(lombok.val) IOException(java.io.IOException) Test(org.junit.Test) TimeUnit(java.util.concurrent.TimeUnit) Consumer(java.util.function.Consumer) AtomicLong(java.util.concurrent.atomic.AtomicLong) Rule(org.junit.Rule) Assert(org.junit.Assert) Collections(java.util.Collections) Cache(io.pravega.segmentstore.storage.Cache) InputStream(java.io.InputStream)
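
The inner try/catch in Example 32 ("Technically not possible.") exists because the BiConsumer lambda cannot declare the checked IOException that StreamHelpers.readAll throws, even though a read from an in-memory stream cannot realistically fail. A JDK-only sketch of the same lambda-boundary wrapping, with illustrative names (readNBytes requires Java 11+):

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.UncheckedIOException;
import java.nio.charset.StandardCharsets;
import java.util.function.BiConsumer;

public class LambdaWrapSketch {

    public static void main(String[] args) {
        byte[] data = "hello world".getBytes(StandardCharsets.UTF_8);
        // BiConsumer cannot throw checked exceptions, so the IOException declared by
        // readNBytes (not realistically possible on a ByteArrayInputStream) is
        // rethrown as UncheckedIOException.
        BiConsumer<Integer, Integer> verify = (offset, length) -> {
            InputStream in = new ByteArrayInputStream(data, offset, length);
            try {
                byte[] chunk = in.readNBytes(length);
                if (chunk.length != length) {
                    throw new AssertionError("short read at offset " + offset);
                }
            } catch (IOException e) {
                throw new UncheckedIOException(e);
            }
        };
        verify.accept(0, 5);
        verify.accept(6, 5);
    }
}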

Example 33 with UncheckedIOException

Use of java.io.UncheckedIOException in project janusgraph by JanusGraph.

The class ElasticSearchIndex, method query():

@Override
public Stream<String> query(IndexQuery query, KeyInformation.IndexRetriever informations, BaseTransaction tx) throws BackendException {
    final ElasticSearchRequest sr = new ElasticSearchRequest();
    final Map<String, Object> esQuery = getFilter(query.getCondition(), informations.get(query.getStore()));
    sr.setQuery(compat.prepareQuery(esQuery));
    if (!query.getOrder().isEmpty()) {
        final List<IndexQuery.OrderEntry> orders = query.getOrder();
        for (final IndexQuery.OrderEntry orderEntry : orders) {
            final String order = orderEntry.getOrder().name();
            final KeyInformation information = informations.get(query.getStore()).get(orderEntry.getKey());
            final Mapping mapping = Mapping.getMapping(information);
            final Class<?> datatype = orderEntry.getDatatype();
            sr.addSort(orderEntry.getKey(), order.toLowerCase(), convertToEsDataType(datatype, mapping));
        }
    }
    sr.setFrom(0);
    if (query.hasLimit()) {
        sr.setSize(Math.min(query.getLimit(), batchSize));
    } else {
        sr.setSize(batchSize);
    }
    ElasticSearchResponse response;
    try {
        final String indexStoreName = getIndexStoreName(query.getStore());
        final String indexType = useMultitypeIndex ? query.getStore() : null;
        response = client.search(indexStoreName, indexType, compat.createRequestBody(sr, NULL_PARAMETERS), sr.getSize() >= batchSize);
        log.debug("First Executed query [{}] in {} ms", query.getCondition(), response.getTook());
        final ElasticSearchScroll resultIterator = new ElasticSearchScroll(client, response, sr.getSize());
        final Stream<RawQuery.Result<String>> toReturn = StreamSupport.stream(Spliterators.spliteratorUnknownSize(resultIterator, Spliterator.ORDERED), false);
        return (query.hasLimit() ? toReturn.limit(query.getLimit()) : toReturn).map(RawQuery.Result::getResult);
    } catch (final IOException | UncheckedIOException e) {
        throw new PermanentBackendException(e);
    }
}
Also used : IndexQuery(org.janusgraph.diskstorage.indexing.IndexQuery) PermanentBackendException(org.janusgraph.diskstorage.PermanentBackendException) Mapping(org.janusgraph.core.schema.Mapping) IndexMapping(org.janusgraph.diskstorage.es.IndexMappings.IndexMapping) UncheckedIOException(java.io.UncheckedIOException) IOException(java.io.IOException) KeyInformation(org.janusgraph.diskstorage.indexing.KeyInformation)
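
In Example 33 the multi-catch converts both the checked IOException from the search call and any UncheckedIOException surfaced while the response is turned into a stream into JanusGraph's checked PermanentBackendException. The sketch below shows the same convert-to-checked pattern with a JDK stand-in: Files.lines throws IOException when the file cannot be opened and wraps later read failures in UncheckedIOException, so a caller that consumes the stream eagerly should catch both. BackendException here is a simplified illustrative stand-in, not JanusGraph's class.

import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class QuerySketch {

    // Illustrative stand-in for a checked backend exception type.
    static class BackendException extends Exception {
        BackendException(Throwable cause) {
            super(cause);
        }
    }

    // Returns all lines of the file containing the search term, converting both
    // open-time and read-time IO failures into the checked BackendException.
    static List<String> query(Path file, String term) throws BackendException {
        try (Stream<String> lines = Files.lines(file)) {
            return lines.filter(line -> line.contains(term)).collect(Collectors.toList());
        } catch (IOException | UncheckedIOException e) {
            throw new BackendException(e);
        }
    }
}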

Example 34 with UncheckedIOException

Use of java.io.UncheckedIOException in project ArachneCentralAPI by OHDSI.

The class EstimationPreprocessor, method attachEstimationAnalysisCode():

private void attachEstimationAnalysisCode(Analysis analysis) {
    Resource resource = new ClassPathResource(ESTIMATION_ANALYSIS_SOURCE);
    try (final InputStream in = resource.getInputStream()) {
        final MultipartFile analysisFile = new MockMultipartFile(ANALYSIS_BUNDLE_FILENAME, ANALYSIS_BUNDLE_FILENAME, null, in);
        analysisService.saveFile(analysisFile, analysis.getAuthor(), analysis, analysisFile.getName(), false, null);
    } catch (IOException e) {
        LOGGER.error("Failed to add file", e);
        throw new UncheckedIOException(e);
    }
}
Also used : MockMultipartFile(org.springframework.mock.web.MockMultipartFile) MultipartFile(org.springframework.web.multipart.MultipartFile) InputStream(java.io.InputStream) ClassPathResource(org.springframework.core.io.ClassPathResource) Resource(org.springframework.core.io.Resource) UncheckedIOException(java.io.UncheckedIOException) IOException(java.io.IOException)

Example 35 with UncheckedIOException

Use of java.io.UncheckedIOException in project ArachneCentralAPI by OHDSI.

The class AnalysisController, method convertToMultipartFile():

private MultipartFile convertToMultipartFile(Resource resource) {
    try {
        // Derive the resource name relative to CC_SQLS_DIR and normalise it to a Unix-style path.
        String rootPath = ((ClassPathResource) resource).getPath();
        String name = convertToUnixPath(rootPath.substring(rootPath.indexOf(CC_SQLS_DIR) + CC_SQLS_DIR.length() + 1));
        return new MockMultipartFile(name, name, null, readResource(CC_SQLS_DIR + "/" + name));
    } catch (IOException e) {
        throw new UncheckedIOException(e);
    }
}
Also used : MockMultipartFile(org.springframework.mock.web.MockMultipartFile) UncheckedIOException(java.io.UncheckedIOException) IOException(java.io.IOException) ClassPathResource(org.springframework.core.io.ClassPathResource)
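
Examples 34 and 35 follow the same pattern: load a classpath resource inside a method whose signature does not declare IOException and rethrow any failure as UncheckedIOException. A minimal Spring-based sketch of that conversion is shown below; the path-trimming logic of Example 35 is omitted and the names are illustrative.

import java.io.IOException;
import java.io.InputStream;
import java.io.UncheckedIOException;

import org.springframework.core.io.ClassPathResource;
import org.springframework.mock.web.MockMultipartFile;
import org.springframework.web.multipart.MultipartFile;

public class ResourceToMultipartSketch {

    // Loads a classpath resource and exposes it as a MultipartFile; read failures
    // are rethrown unchecked so callers need not declare IOException.
    static MultipartFile fromClasspath(String path, String name) {
        try (InputStream in = new ClassPathResource(path).getInputStream()) {
            return new MockMultipartFile(name, name, null, in);
        } catch (IOException e) {
            throw new UncheckedIOException(e);
        }
    }
}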

Aggregations

UncheckedIOException (java.io.UncheckedIOException): 76
IOException (java.io.IOException): 72
Path (java.nio.file.Path): 13
InputStream (java.io.InputStream): 7
ArrayList (java.util.ArrayList): 7
Test (org.junit.Test): 7
File (java.io.File): 6
Arrays (java.util.Arrays): 6
HashMap (java.util.HashMap): 6
Map (java.util.Map): 6
List (java.util.List): 5
Collectors (java.util.stream.Collectors): 5
BufferedReader (java.io.BufferedReader): 4
URL (java.net.URL): 4
IntStream (java.util.stream.IntStream): 4
Protein (de.bioforscher.jstructure.model.structure.Protein): 3
InterruptedIOException (java.io.InterruptedIOException): 3
Random (java.util.Random): 3
CountDownLatch (java.util.concurrent.CountDownLatch): 3
AtomicInteger (java.util.concurrent.atomic.AtomicInteger): 3